diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/0. download models.html b/0. download models.html new file mode 100644 index 0000000..09a0705 --- /dev/null +++ b/0. download models.html @@ -0,0 +1,587 @@ + + + + + + + + + +WhisperSpeech - Download models + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Download models

+
+ + + +
+ + + + +
+ + + +
+ + + + + + +
+ +
+ + + + + \ No newline at end of file diff --git a/1. acoustic token extraction.html b/1. acoustic token extraction.html new file mode 100644 index 0000000..abbcf3a --- /dev/null +++ b/1. acoustic token extraction.html @@ -0,0 +1,735 @@ + + + + + + + + + +WhisperSpeech - Acoustic token extraction + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Acoustic token extraction

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
# unpacked small.tar should go here:
+datadir = Path('/mnt/')
+# you can download it downloaded from
+# https://github.com/facebookresearch/libri-light/blob/main/data_preparation/README.md
+
+
+

source

+
+

load

+
+
 load (fname, newsr=24000)
+
+

Load an audio file to the GPU and resample to newsr.

+
+

source

+
+
+

load_model

+
+
 load_model ()
+
+

Load the pretrained EnCodec model

+
+

source

+
+
+

extract_Atoks

+
+
 extract_Atoks (model, audio)
+
+

Extract EnCodec tokens for the given audio tensor (or file path) using the given model (see load_model).

+
+

source

+
+
+

extract_acoustic

+
+
 extract_acoustic (srcdir:pathlib.Path, outdir:pathlib.Path)
+
+

Convert audio files to .encodec files with tensors of tokens

+ + + + + + + + + + + + + + + + + + + + +
TypeDetails
srcdirPathsource dir, should contain *.flac files
outdirPathoutput dir, will get the *.encodec files
+
+
# process all files for speaker 1401
+model = load_model()
+extract_acoustic(model, datadir/'small/1401', datadir/'acoustic-1401')
+
+ + +
+
+ +
+ + 100.00% [131/131 05:38<00:00] +
+ +
+
+
+
!du -hs {datadir}/acoustic-1401/
+
+
78M /mnt/acoustic-1401/
+
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/1B. Voice activity detection_files/figure-html/cell-9-output-2.png b/1B. Voice activity detection_files/figure-html/cell-9-output-2.png new file mode 100644 index 0000000..18cf70c Binary files /dev/null and b/1B. Voice activity detection_files/figure-html/cell-9-output-2.png differ diff --git a/1b. voice activity detection.html b/1b. voice activity detection.html new file mode 100644 index 0000000..ee23423 --- /dev/null +++ b/1b. voice activity detection.html @@ -0,0 +1,781 @@ + + + + + + + + + +WhisperSpeech - Perform Voice Activity Detection (VAD) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Perform Voice Activity Detection (VAD)

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
from IPython.display import HTML
+import pylab as plt
+
+

We use the voice activity detection model from WhisperX (but we don’t use their merging algorithm):

+

Test just a few files:

+
+
ds = wds.WebDataset('/data2/libritts-r-raw-000000.tar').compose(wds.decode(wds.torch_audio))
+
+
+
for x in ds: break
+x
+
+
{'__key__': './dev-clean/1272/128104/1272_128104_000001_000000',
+ '__url__': '/data2/libritts-r-raw-000000.tar',
+ 'normalized.txt': "A 'JOLLY' ART CRITIC",
+ 'original.txt': "A 'JOLLY' ART CRITIC",
+ 'wav': (tensor([[ 0.0000,  0.0000,  0.0000,  ..., -0.0036, -0.0038, -0.0050]]),
+  24000)}
+
+
+
+
# test it locally
+input:str  = 'https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-large-wo6454-flac-000002.tar'
+output:str = input.rsplit("/", 1)[1].replace('flac', 'vad') + ".gz"
+
+ds = load_dataset(input)
+vad_model = whisperx.vad.load_vad_model('cuda')
+
+with wds.TarWriter(output) as sink:
+    for s in progress_bar(ds, total=10):
+        audio, sr = s['audio']
+        assert(sr == 16000)
+        sink.write({
+            "__key__": s['__key__'],
+            "vad.npy": np.array(segment_audio(vad_model, audio), dtype=np.float32)
+        })
+        
+!ls -lh {output}
+!tar tf {output}
+
+
Lightning automatically upgraded your loaded checkpoint from v1.5.4 to v2.0.2. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint --file ../../../.cache/torch/whisperx-vad-segmentation.bin`
+
+
+
Model was trained with pyannote.audio 0.0.1, yours is 2.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.
+Model was trained with torch 1.10.0+cu102, yours is 2.0.1+cu118. Bad things might happen unless you revert torch to 1.x.
+-rw-r--r-- 1 root root 7.5K Sep 21 08:51 librilight-large-wo6454-vad-000002.tar.gz
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_03_molesworth_64kb.vad.npy
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_04_molesworth_64kb.vad.npy
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_08_molesworth_64kb.vad.npy
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_09_molesworth_64kb.vad.npy
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_10_molesworth_64kb.vad.npy
+large/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_11_molesworth_64kb.vad.npy
+large/10089/goodcheerstories_1511_librivox_64kb_mp3/goodcheerstories_13_dickinson_64kb.vad.npy
+large/10089/goodcheerstories_1511_librivox_64kb_mp3/goodcheerstories_30_dickinson_64kb.vad.npy
+large/10089/mothers_nursery_tales_1512_librivox_64kb_mp3/mothers_nursery_tales_16_pyle_64kb.vad.npy
+large/10089/mothers_nursery_tales_1512_librivox_64kb_mp3/mothers_nursery_tales_25_pyle_64kb.vad.npy
+
+
+ + +
+
+ +
+ + 100.00% [10/10 00:10<00:00] +
+ +
+
+
+

Batch processing

+

Let’s put everything above together.

+
+
# for reference, this was the performance on a single 4090:
+process_shard('https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-small-flac-000000.tar')
+
+
Lightning automatically upgraded your loaded checkpoint from v1.5.4 to v2.0.2. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint --file ../../../.cache/torch/whisperx-vad-segmentation.bin`
+
+
+
Model was trained with pyannote.audio 0.0.1, yours is 2.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.
+Model was trained with torch 1.10.0+cu102, yours is 2.0.1+cu118. Bad things might happen unless you revert torch to 1.x.
+
+
+ + +
+
+ +
+ + 100.00% [335/335 03:30<00:00] +
+ +
+
+
+
for x in wds.WebDataset('/data2/libritts-r-vad-000000.tar').decode(): break
+x['__key__'].split('/')
+
+
['.', 'dev-clean', '1272', '128104', '1272_128104_000001_000000']
+
+
+
+
plt.hist([x['vad.npy'].shape[0] for x in wds.WebDataset('/data2/libritts-r-vad-000000.tar').decode()])
+
+
(array([1.6967e+04, 0.0000e+00, 6.4500e+02, 0.0000e+00, 0.0000e+00,
+        1.0800e+02, 0.0000e+00, 2.5000e+01, 0.0000e+00, 7.0000e+00]),
+ array([1. , 1.4, 1.8, 2.2, 2.6, 3. , 3.4, 3.8, 4.2, 4.6, 5. ]),
+ <BarContainer object of 10 artists>)
+
+
+
+
+

+
+
+
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/1c. vad merging.html b/1c. vad merging.html new file mode 100644 index 0000000..156b0e0 --- /dev/null +++ b/1c. vad merging.html @@ -0,0 +1,1473 @@ + + + + + + + + + +WhisperSpeech - VAD merging + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

VAD merging

+
+ + + +
+ + + + +
+ + + +
+ + + +
+

source

+
+

derived_name

+
+
 derived_name (input, kind, base='audio')
+
+
+
ds = wds.WebDataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar']).compose(
+    wds.decode(wds.torch_audio),
+    utils.merge_in(utils.derived_dataset('vad')),
+    utils.find_audio,
+    utils.split_to_chunks,
+    utils.merge_in(utils.derived_dataset('spk_emb')),
+)
+
+
+
import IPython
+import time
+
+
+
prev = None
+for s in progress_bar(ds, total=20):
+    sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)
+    secs = s['tend'] - s['tstart']
+    same = sim > 0.6 if secs > 2 else sim > 0.1
+    if not same: print("new")
+    print(s['__key__'], sim, secs)
+    display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))
+    if secs > 2:
+        prev = s
+    time.sleep(.5)
+s
+
+
+
ds = wds.WebDataset([utils.derived_name('../wolnelektury-wds2/wolnelektury-audio-000000.tar', 'vad')]).compose(
+    wds.decode(),
+    split,
+    utils.merge_in(utils.derived_dataset('spk_emb', base='vad', suffix='')),
+    merge_by_src_key,
+)
+
+
+
for s in ds: break
+s
+
+
+
ds = wds.WebDataset([utils.derived_name('../wolnelektury-wds2/wolnelektury-audio-000000.tar', 'vad')]).compose(
+    wds.decode(),
+    split,
+    utils.merge_in(utils.derived_dataset('spk_emb', base='vad', suffix='')),
+    merge_by_src_key,
+    chunk_merger,
+)
+
+
+
for s in ds: break
+s
+
+
+
ds = wds.WebDataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar']).compose(
+    wds.decode(wds.torch_audio),
+    utils.merge_in(utils.derived_dataset('vad')),
+    utils.find_audio,
+    utils.split_to_chunks,
+    utils.merge_in(utils.derived_dataset('spk_emb')),
+    merge_by_src_key,
+    chunk_merger,
+    utils.merge_in(utils.derived_dataset('audio', suffix='', decoders=[wds.torch_audio])),
+    utils.find_audio,
+    lambda x: utils.split_to_chunks(x, metakeys=['spk_emb.npy']),
+)
+
+
+
for s in ds: break
+s
+
+
+
prev = None
+for s in progress_bar(ds, total=20):
+    sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)
+    secs = s['tend'] - s['tstart']
+    same = sim > 0.6 if secs > 2 else sim > 0.1
+    if not same: print("new")
+    print(s['__key__'], sim, secs, sum([e-s for s,e in s['orig_s']['subvads.pyd'][s['i']]]))
+    display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))
+    if secs > 2:
+        prev = s
+    time.sleep(.5)
+
+
+
prepare_mvad('../wolnelektury-wds2/wolnelektury-audio-000000.tar')
+
+ + +
+
+ +
+ + 100.00% [235/235 00:04<00:00] +
+ +
+
+
+
!tar tf ../wolnelektury-wds2/wolnelektury-maxvad-000000.tar.gz
+
+
./kornhauser-wiatr/kornhauser-wiatr_001.spk_emb.npy
+./kornhauser-wiatr/kornhauser-wiatr_001.subvads.pyd
+./kornhauser-wiatr/kornhauser-wiatr_001.vad.npy
+./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.spk_emb.npy
+./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.subvads.pyd
+./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.vad.npy
+./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.spk_emb.npy
+./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.subvads.pyd
+./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.vad.npy
+./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.spk_emb.npy
+./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.subvads.pyd
+./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.vad.npy
+./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.spk_emb.npy
+./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.subvads.pyd
+./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.vad.npy
+./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.spk_emb.npy
+./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.subvads.pyd
+./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.vad.npy
+./sonety-krymskie-stepy-akermanskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.spk_emb.npy
+./sonety-krymskie-stepy-akermanskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.subvads.pyd
+./sonety-krymskie-stepy-akermanskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.vad.npy
+./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.spk_emb.npy
+./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.subvads.pyd
+./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.vad.npy
+./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.spk_emb.npy
+./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.subvads.pyd
+./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.vad.npy
+./grabinski-nietykalny/grabinski-nietykalny.spk_emb.npy
+./grabinski-nietykalny/grabinski-nietykalny.subvads.pyd
+./grabinski-nietykalny/grabinski-nietykalny.vad.npy
+./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.spk_emb.npy
+./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.subvads.pyd
+./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.vad.npy
+./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.spk_emb.npy
+./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.subvads.pyd
+./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.vad.npy
+./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.spk_emb.npy
+./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.subvads.pyd
+./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.vad.npy
+./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.spk_emb.npy
+./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.subvads.pyd
+./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.vad.npy
+./perrault-kopciuszek/perrault-kopciuszek.spk_emb.npy
+./perrault-kopciuszek/perrault-kopciuszek.subvads.pyd
+./perrault-kopciuszek/perrault-kopciuszek.vad.npy
+./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.spk_emb.npy
+./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.subvads.pyd
+./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.vad.npy
+./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.spk_emb.npy
+./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.subvads.pyd
+./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.vad.npy
+./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.spk_emb.npy
+./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.subvads.pyd
+./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.vad.npy
+./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.spk_emb.npy
+./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.subvads.pyd
+./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.vad.npy
+./janko-muzykant/janko-muzykant.spk_emb.npy
+./janko-muzykant/janko-muzykant.subvads.pyd
+./janko-muzykant/janko-muzykant.vad.npy
+./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.spk_emb.npy
+./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.subvads.pyd
+./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.vad.npy
+./lange-nowy-tarzan/antoni-lange-nowy-tarzan.spk_emb.npy
+./lange-nowy-tarzan/antoni-lange-nowy-tarzan.subvads.pyd
+./lange-nowy-tarzan/antoni-lange-nowy-tarzan.vad.npy
+./w-pamietniku-zofii-bobrowny/w-pamietniku-zofii-bobrowny.spk_emb.npy
+./w-pamietniku-zofii-bobrowny/w-pamietniku-zofii-bobrowny.subvads.pyd
+./w-pamietniku-zofii-bobrowny/w-pamietniku-zofii-bobrowny.vad.npy
+./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.spk_emb.npy
+./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.subvads.pyd
+./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.vad.npy
+./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.spk_emb.npy
+./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.subvads.pyd
+./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.vad.npy
+./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.spk_emb.npy
+./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.subvads.pyd
+./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.vad.npy
+./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.spk_emb.npy
+./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.subvads.pyd
+./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.vad.npy
+./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.spk_emb.npy
+./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.subvads.pyd
+./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_006_tom-ii-rozdzial-xxxv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_006_tom-ii-rozdzial-xxxv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_006_tom-ii-rozdzial-xxxv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-rozdzial-xlv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-rozdzial-xlv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-rozdzial-xlv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.vad.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.spk_emb.npy
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.subvads.pyd
+./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.vad.npy
+./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemki-tajemnica-dworku-pod-cmentarzem.spk_emb.npy
+./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemki-tajemnica-dworku-pod-cmentarzem.subvads.pyd
+./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemki-tajemnica-dworku-pod-cmentarzem.vad.npy
+./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.spk_emb.npy
+./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.subvads.pyd
+./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.vad.npy
+./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.spk_emb.npy
+./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.subvads.pyd
+./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.vad.npy
+./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.spk_emb.npy
+./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.subvads.pyd
+./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.vad.npy
+./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.spk_emb.npy
+./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.subvads.pyd
+./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.vad.npy
+./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.spk_emb.npy
+./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.subvads.pyd
+./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.vad.npy
+./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.spk_emb.npy
+./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.subvads.pyd
+./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.vad.npy
+./historia-zoltej-cizemki/04-antonina-domanska-historia-zoltej-cizemki-konik-zwierzyniecki.spk_emb.npy
+./historia-zoltej-cizemki/04-antonina-domanska-historia-zoltej-cizemki-konik-zwierzyniecki.subvads.pyd
+./historia-zoltej-cizemki/04-antonina-domanska-historia-zoltej-cizemki-konik-zwierzyniecki.vad.npy
+./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.spk_emb.npy
+./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.subvads.pyd
+./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.vad.npy
+./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.spk_emb.npy
+./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.subvads.pyd
+./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.vad.npy
+./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.spk_emb.npy
+./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.subvads.pyd
+./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.vad.npy
+./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.spk_emb.npy
+./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.subvads.pyd
+./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.vad.npy
+./do-justyny/franciszek-karpinski-do-justyny.spk_emb.npy
+./do-justyny/franciszek-karpinski-do-justyny.subvads.pyd
+./do-justyny/franciszek-karpinski-do-justyny.vad.npy
+./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.spk_emb.npy
+./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.subvads.pyd
+./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.vad.npy
+./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-nowe.spk_emb.npy
+./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-nowe.subvads.pyd
+./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-nowe.vad.npy
+./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.spk_emb.npy
+./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.subvads.pyd
+./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.vad.npy
+./grabinski-przypadek/grabinski-przypadek.spk_emb.npy
+./grabinski-przypadek/grabinski-przypadek.subvads.pyd
+./grabinski-przypadek/grabinski-przypadek.vad.npy
+./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.spk_emb.npy
+./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.subvads.pyd
+./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.vad.npy
+./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.spk_emb.npy
+./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.subvads.pyd
+./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.vad.npy
+./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.spk_emb.npy
+./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.subvads.pyd
+./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.vad.npy
+./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.spk_emb.npy
+./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.subvads.pyd
+./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.vad.npy
+./sroczynska-lasowiackie-serce/lasowiackie-serce.spk_emb.npy
+./sroczynska-lasowiackie-serce/lasowiackie-serce.subvads.pyd
+./sroczynska-lasowiackie-serce/lasowiackie-serce.vad.npy
+./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-balonika.spk_emb.npy
+./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-balonika.subvads.pyd
+./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-balonika.vad.npy
+./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.spk_emb.npy
+./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.subvads.pyd
+./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.vad.npy
+./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.spk_emb.npy
+./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.subvads.pyd
+./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.vad.npy
+./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.spk_emb.npy
+./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.subvads.pyd
+./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.vad.npy
+./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.spk_emb.npy
+./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.subvads.pyd
+./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.vad.npy
+./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.spk_emb.npy
+./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.subvads.pyd
+./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.vad.npy
+./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.spk_emb.npy
+./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.subvads.pyd
+./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.vad.npy
+./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-bakczysaraj-w-nocy.spk_emb.npy
+./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-bakczysaraj-w-nocy.subvads.pyd
+./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-bakczysaraj-w-nocy.vad.npy
+./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.spk_emb.npy
+./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.subvads.pyd
+./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.vad.npy
+./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.spk_emb.npy
+./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.subvads.pyd
+./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.vad.npy
+./medrcy-swiata/autor-nieznany-medrcy-swiata.spk_emb.npy
+./medrcy-swiata/autor-nieznany-medrcy-swiata.subvads.pyd
+./medrcy-swiata/autor-nieznany-medrcy-swiata.vad.npy
+./aniol/aniol.spk_emb.npy
+./aniol/aniol.subvads.pyd
+./aniol/aniol.vad.npy
+./do-motyla/jan-andrzej-morsztyn-do-motyla.spk_emb.npy
+./do-motyla/jan-andrzej-morsztyn-do-motyla.subvads.pyd
+./do-motyla/jan-andrzej-morsztyn-do-motyla.vad.npy
+./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.spk_emb.npy
+./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.subvads.pyd
+./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.vad.npy
+./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.spk_emb.npy
+./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.subvads.pyd
+./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.vad.npy
+./spiewak-spod-strzechy/spiewak-spod-strzechy.spk_emb.npy
+./spiewak-spod-strzechy/spiewak-spod-strzechy.subvads.pyd
+./spiewak-spod-strzechy/spiewak-spod-strzechy.vad.npy
+./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.spk_emb.npy
+./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.subvads.pyd
+./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.vad.npy
+./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.spk_emb.npy
+./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.subvads.pyd
+./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.vad.npy
+./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.spk_emb.npy
+./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.subvads.pyd
+./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.vad.npy
+./berenice/edgar-allan-poe-berenice.spk_emb.npy
+./berenice/edgar-allan-poe-berenice.subvads.pyd
+./berenice/edgar-allan-poe-berenice.vad.npy
+./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.spk_emb.npy
+./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.subvads.pyd
+./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.vad.npy
+./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.spk_emb.npy
+./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.subvads.pyd
+./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.vad.npy
+./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.spk_emb.npy
+./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.subvads.pyd
+./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.vad.npy
+./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.spk_emb.npy
+./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.subvads.pyd
+./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.vad.npy
+./grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.spk_emb.npy
+./grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.subvads.pyd
+./grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.vad.npy
+./blumengraber-do-profesorow/blumengraber-do-profesorow.spk_emb.npy
+./blumengraber-do-profesorow/blumengraber-do-profesorow.subvads.pyd
+./blumengraber-do-profesorow/blumengraber-do-profesorow.vad.npy
+./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.spk_emb.npy
+./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.subvads.pyd
+./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_010_10-przybysz-z-indii.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_010_10-przybysz-z-indii.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_010_10-przybysz-z-indii.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.vad.npy
+./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.spk_emb.npy
+./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.subvads.pyd
+./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.vad.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.spk_emb.npy
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.subvads.pyd
+./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.vad.npy
+./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.spk_emb.npy
+./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.subvads.pyd
+./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.vad.npy
+./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.spk_emb.npy
+./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.subvads.pyd
+./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.vad.npy
+./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.spk_emb.npy
+./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.subvads.pyd
+./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.vad.npy
+./hop-frog/edgar-allan-poe-hop-frog.spk_emb.npy
+./hop-frog/edgar-allan-poe-hop-frog.subvads.pyd
+./hop-frog/edgar-allan-poe-hop-frog.vad.npy
+./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.spk_emb.npy
+./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.subvads.pyd
+./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.vad.npy
+./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.spk_emb.npy
+./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.subvads.pyd
+./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.vad.npy
+./tulli-sny-i-kamienie/sny-i-kamienie.spk_emb.npy
+./tulli-sny-i-kamienie/sny-i-kamienie.subvads.pyd
+./tulli-sny-i-kamienie/sny-i-kamienie.vad.npy
+./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.spk_emb.npy
+./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.subvads.pyd
+./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.vad.npy
+./skrucha-jozi/skrucha-jozi.spk_emb.npy
+./skrucha-jozi/skrucha-jozi.subvads.pyd
+./skrucha-jozi/skrucha-jozi.vad.npy
+./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.spk_emb.npy
+./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.subvads.pyd
+./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.vad.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.spk_emb.npy
+./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.subvads.pyd
+./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.vad.npy
+./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.spk_emb.npy
+./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.subvads.pyd
+./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.vad.npy
+./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.spk_emb.npy
+./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.subvads.pyd
+./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.vad.npy
+./sonety-krymskie-bakczysaraj/adam-mickiewicz-sonety-krymskie-bakczysaraj.spk_emb.npy
+./sonety-krymskie-bakczysaraj/adam-mickiewicz-sonety-krymskie-bakczysaraj.subvads.pyd
+./sonety-krymskie-bakczysaraj/adam-mickiewicz-sonety-krymskie-bakczysaraj.vad.npy
+./wolny-hamkalo-nikt-nic/nikt-nic.spk_emb.npy
+./wolny-hamkalo-nikt-nic/nikt-nic.subvads.pyd
+./wolny-hamkalo-nikt-nic/nikt-nic.vad.npy
+./wabik-bajki-nowe/wabik-bajki-nowe.spk_emb.npy
+./wabik-bajki-nowe/wabik-bajki-nowe.subvads.pyd
+./wabik-bajki-nowe/wabik-bajki-nowe.vad.npy
+./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.spk_emb.npy
+./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.subvads.pyd
+./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.vad.npy
+./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.spk_emb.npy
+./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.subvads.pyd
+./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.vad.npy
+./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.spk_emb.npy
+./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.subvads.pyd
+./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.vad.npy
+./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.spk_emb.npy
+./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.subvads.pyd
+./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.vad.npy
+./do-delljusa/do-delljusa.spk_emb.npy
+./do-delljusa/do-delljusa.subvads.pyd
+./do-delljusa/do-delljusa.vad.npy
+./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.spk_emb.npy
+./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.subvads.pyd
+./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.vad.npy
+./janicki-i-nas-wybawi/i-nas-wybawi.spk_emb.npy
+./janicki-i-nas-wybawi/i-nas-wybawi.subvads.pyd
+./janicki-i-nas-wybawi/i-nas-wybawi.vad.npy
+./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.spk_emb.npy
+./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.subvads.pyd
+./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.vad.npy
+./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.spk_emb.npy
+./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.subvads.pyd
+./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.vad.npy
+./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.spk_emb.npy
+./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.subvads.pyd
+./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.vad.npy
+./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.spk_emb.npy
+./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.subvads.pyd
+./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.vad.npy
+./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.spk_emb.npy
+./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.subvads.pyd
+./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.vad.npy
+./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.spk_emb.npy
+./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.subvads.pyd
+./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.vad.npy
+./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.spk_emb.npy
+./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.subvads.pyd
+./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.vad.npy
+./do-leukonoe/do-leukonoe.spk_emb.npy
+./do-leukonoe/do-leukonoe.subvads.pyd
+./do-leukonoe/do-leukonoe.vad.npy
+./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.spk_emb.npy
+./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.subvads.pyd
+./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.vad.npy
+./slowka-zbior-odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.spk_emb.npy
+./slowka-zbior-odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.subvads.pyd
+./slowka-zbior-odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.vad.npy
+./arystoteles-poetyka/arystoteles-poetyka_004_rozdzial-4.spk_emb.npy
+./arystoteles-poetyka/arystoteles-poetyka_004_rozdzial-4.subvads.pyd
+./arystoteles-poetyka/arystoteles-poetyka_004_rozdzial-4.vad.npy
+./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.spk_emb.npy
+./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.subvads.pyd
+./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.vad.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.spk_emb.npy
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.subvads.pyd
+./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.vad.npy
+./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.spk_emb.npy
+./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.subvads.pyd
+./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.vad.npy
+./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.spk_emb.npy
+./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.subvads.pyd
+./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.vad.npy
+
+
+
+
ds = chunked_audio_dataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar'])
+prev = None
+for s in progress_bar(ds, total=6):
+    sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)
+    if sim < 0.5: print("new")
+    print(s['__key__'], sim, s['tend'] - s['tstart'], sum([e-s for s,e in s['orig_s']['subvads.pyd'][s['i']]]))
+    display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))
+    time.sleep(.5)
+    prev = s
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/2A. Speaker Embeddings.html b/2A. Speaker Embeddings.html new file mode 100644 index 0000000..5978865 --- /dev/null +++ b/2A. Speaker Embeddings.html @@ -0,0 +1,790 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

Precompute Whisper transcriptions for VQ bottleneck distillation

+

Doing transcription means sampling from the Whisper auto-regressive decoder. This is too slow to do for each training batch. Fortunately the transcriptions are small text snippets so we can precompute them once for the whole dataset.

+

We use segments from Voice Activity Detection to reduce any boundary issues, then we use webdataset to yield multiple chunks from a FLAC file we only load once. The VAD segments are merged into longer chunks to make Whisper processing more efficient (it always processes 30s at a time).

+

Usage:

+
python -m whisperspeech.extract_spk_emb librilight-large-wo6454-flac-000002.tar
+

You can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt.

+
+
+

Batch processing

+

Let’s put everything above together.

+
+
dl = chunked_dataset('../cc-small/cc-mix-000000.tar', 'mix')
+for keys, samples, seconds in dl: break
+keys, samples, seconds
+
+
(['cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_023',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_037',
+  'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_009',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_010',
+  'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_004',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_049',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_000',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_024',
+  'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_033',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_034',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_012',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_052',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_007',
+  'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_030',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_055',
+  'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_006'],
+ tensor([[-0.0154, -0.0289, -0.0376,  ...,  0.0000,  0.0000,  0.0000],
+         [-0.0035, -0.0058, -0.0082,  ...,  0.0000,  0.0000,  0.0000],
+         [-0.0082, -0.0150, -0.0179,  ...,  0.0000,  0.0000,  0.0000],
+         ...,
+         [-0.0018,  0.0017,  0.0005,  ...,  0.0000,  0.0000,  0.0000],
+         [ 0.0014,  0.0021,  0.0019,  ...,  0.0000,  0.0000,  0.0000],
+         [ 0.0055,  0.0106,  0.0086,  ...,  0.0000,  0.0000,  0.0000]]),
+ tensor([ 5.1536,  8.5666,  2.2867, 22.5939,  1.7406, 22.4744,  2.2355,  2.3549,
+          2.0307, 18.0717,  6.4505,  2.1843,  1.6382,  5.5461,  2.6450, 29.1297]))
+
+
+
+
classifier = EncoderClassifier.from_hparams("speechbrain/spkrec-ecapa-voxceleb",
+                                            savedir=os.path.expanduser("~/.cache/speechbrain/"),
+                                            run_opts={"device": "cuda"})
+
+
+
embs = F.normalize(classifier.encode_batch(samples, wav_lens=seconds/30).squeeze(1), dim=-1)
+
+
+
embs  @ embs.T
+
+
tensor([[ 1.0000e+00, -1.5016e-01, -1.0663e-01,  7.4717e-01,  6.6663e-01,
+          6.7088e-01,  7.3192e-01,  8.0751e-01, -1.4667e-01, -1.5538e-01,
+          7.8594e-01, -1.7131e-01,  4.7389e-02,  3.8923e-01,  8.0528e-02,
+          6.8884e-02],
+        [-1.5016e-01,  1.0000e+00,  7.4450e-01, -7.8480e-02, -9.2287e-02,
+         -4.8926e-02, -1.8269e-01, -1.2868e-01,  6.2437e-01,  7.6687e-01,
+         -1.2109e-01,  5.7231e-01,  5.4483e-02, -1.3711e-02,  5.4225e-02,
+          1.0608e-01],
+        [-1.0663e-01,  7.4450e-01,  1.0000e+00, -9.6316e-02, -8.8784e-02,
+         -3.4282e-02, -1.6323e-01, -1.2561e-01,  5.7611e-01,  7.6471e-01,
+         -1.1900e-01,  5.1093e-01,  3.6564e-02,  2.1784e-03,  4.5240e-02,
+          8.6847e-02],
+        [ 7.4717e-01, -7.8480e-02, -9.6316e-02,  1.0000e+00,  6.9144e-01,
+          7.3513e-01,  7.2880e-01,  7.7707e-01, -8.8781e-02, -8.2090e-02,
+          7.7152e-01, -5.2820e-02,  7.3040e-02,  3.4047e-01,  9.3617e-02,
+          1.1111e-01],
+        [ 6.6663e-01, -9.2287e-02, -8.8784e-02,  6.9144e-01,  1.0000e+00,
+          7.1832e-01,  6.3586e-01,  7.3829e-01, -7.3225e-02, -1.2791e-01,
+          7.3249e-01, -4.8655e-04, -2.3932e-02,  3.5492e-01,  5.7829e-02,
+          1.2558e-01],
+        [ 6.7088e-01, -4.8926e-02, -3.4282e-02,  7.3513e-01,  7.1832e-01,
+          1.0000e+00,  6.7989e-01,  7.1707e-01, -1.1102e-01, -3.2756e-02,
+          7.0298e-01,  9.7910e-04,  3.2516e-02,  3.2002e-01,  9.5534e-02,
+          1.2125e-01],
+        [ 7.3192e-01, -1.8269e-01, -1.6323e-01,  7.2880e-01,  6.3586e-01,
+          6.7989e-01,  1.0000e+00,  7.4862e-01, -1.4716e-01, -1.8850e-01,
+          7.7709e-01, -1.4848e-01,  3.5645e-02,  3.9155e-01,  8.5304e-02,
+          7.6598e-02],
+        [ 8.0751e-01, -1.2868e-01, -1.2561e-01,  7.7707e-01,  7.3829e-01,
+          7.1707e-01,  7.4862e-01,  1.0000e+00, -1.3192e-01, -9.4631e-02,
+          8.1980e-01, -1.0383e-01, -1.2569e-02,  4.0366e-01,  1.3611e-02,
+          7.2108e-02],
+        [-1.4667e-01,  6.2437e-01,  5.7611e-01, -8.8781e-02, -7.3225e-02,
+         -1.1102e-01, -1.4716e-01, -1.3192e-01,  1.0000e+00,  6.1238e-01,
+         -7.4339e-02,  4.6340e-01, -3.2115e-02,  1.9445e-02, -2.3383e-03,
+         -5.2721e-04],
+        [-1.5538e-01,  7.6687e-01,  7.6471e-01, -8.2090e-02, -1.2791e-01,
+         -3.2756e-02, -1.8850e-01, -9.4631e-02,  6.1238e-01,  1.0000e+00,
+         -1.2142e-01,  5.6736e-01,  3.0472e-02, -2.1869e-02,  3.7176e-02,
+          1.1145e-01],
+        [ 7.8594e-01, -1.2109e-01, -1.1900e-01,  7.7152e-01,  7.3249e-01,
+          7.0298e-01,  7.7709e-01,  8.1980e-01, -7.4339e-02, -1.2142e-01,
+          1.0000e+00, -4.7116e-02,  3.0283e-02,  3.6122e-01,  3.7660e-02,
+          1.3460e-01],
+        [-1.7131e-01,  5.7231e-01,  5.1093e-01, -5.2820e-02, -4.8655e-04,
+          9.7910e-04, -1.4848e-01, -1.0383e-01,  4.6340e-01,  5.6736e-01,
+         -4.7116e-02,  1.0000e+00,  1.2047e-01,  1.8673e-02,  1.4013e-01,
+          1.9592e-01],
+        [ 4.7389e-02,  5.4483e-02,  3.6564e-02,  7.3040e-02, -2.3932e-02,
+          3.2516e-02,  3.5645e-02, -1.2569e-02, -3.2115e-02,  3.0472e-02,
+          3.0283e-02,  1.2047e-01,  1.0000e+00, -2.5141e-02,  8.7659e-01,
+          6.1994e-01],
+        [ 3.8923e-01, -1.3711e-02,  2.1784e-03,  3.4047e-01,  3.5492e-01,
+          3.2002e-01,  3.9155e-01,  4.0366e-01,  1.9445e-02, -2.1869e-02,
+          3.6122e-01,  1.8673e-02, -2.5141e-02,  1.0000e+00,  2.9265e-04,
+          2.0769e-02],
+        [ 8.0528e-02,  5.4225e-02,  4.5240e-02,  9.3617e-02,  5.7829e-02,
+          9.5534e-02,  8.5304e-02,  1.3611e-02, -2.3383e-03,  3.7176e-02,
+          3.7660e-02,  1.4013e-01,  8.7659e-01,  2.9265e-04,  1.0000e+00,
+          6.3008e-01],
+        [ 6.8884e-02,  1.0608e-01,  8.6847e-02,  1.1111e-01,  1.2558e-01,
+          1.2125e-01,  7.6598e-02,  7.2108e-02, -5.2721e-04,  1.1145e-01,
+          1.3460e-01,  1.9592e-01,  6.1994e-01,  2.0769e-02,  6.3008e-01,
+          1.0000e+00]], device='cuda:0')
+
+
+
+
seconds
+
+
tensor([ 4.9147, 14.5051,  8.8225,  9.8293,  4.2150,  3.1399,  5.1536,  5.5290,
+         4.9317, 12.8499,  7.5085,  2.3379, 17.1672,  1.2287, 29.0785,  3.2935])
+
+
+
+
((embs.unsqueeze(1) - embs.unsqueeze(0))**2).sum(-1)
+
+
tensor([[0.0000, 2.3003, 2.2133, 0.5057, 0.6667, 0.6582, 0.5362, 0.3850, 2.2933,
+         2.3108, 0.4281, 2.3426, 1.9052, 1.2215, 1.8389, 1.8622],
+        [2.3003, 0.0000, 0.5110, 2.1570, 2.1846, 2.0979, 2.3654, 2.2574, 0.7513,
+         0.4663, 2.2422, 0.8554, 1.8910, 2.0274, 1.8916, 1.7878],
+        [2.2133, 0.5110, 0.0000, 2.1926, 2.1776, 2.0686, 2.3265, 2.2512, 0.8478,
+         0.4706, 2.2380, 0.9781, 1.9269, 1.9956, 1.9095, 1.8263],
+        [0.5057, 2.1570, 2.1926, 0.0000, 0.6171, 0.5297, 0.5424, 0.4459, 2.1776,
+         2.1642, 0.4570, 2.1056, 1.8539, 1.3191, 1.8128, 1.7778],
+        [0.6667, 2.1846, 2.1776, 0.6171, 0.0000, 0.5634, 0.7283, 0.5234, 2.1465,
+         2.2558, 0.5350, 2.0010, 2.0479, 1.2902, 1.8843, 1.7488],
+        [0.6582, 2.0979, 2.0686, 0.5297, 0.5634, 0.0000, 0.6402, 0.5659, 2.2220,
+         2.0655, 0.5940, 1.9980, 1.9350, 1.3600, 1.8089, 1.7575],
+        [0.5362, 2.3654, 2.3265, 0.5424, 0.7283, 0.6402, 0.0000, 0.5028, 2.2943,
+         2.3770, 0.4458, 2.2970, 1.9287, 1.2169, 1.8294, 1.8468],
+        [0.3850, 2.2574, 2.2512, 0.4459, 0.5234, 0.5659, 0.5028, 0.0000, 2.2638,
+         2.1893, 0.3604, 2.2077, 2.0251, 1.1927, 1.9728, 1.8558],
+        [2.2933, 0.7513, 0.8478, 2.1776, 2.1465, 2.2220, 2.2943, 2.2638, 0.0000,
+         0.7752, 2.1487, 1.0732, 2.0642, 1.9611, 2.0047, 2.0011],
+        [2.3108, 0.4663, 0.4706, 2.1642, 2.2558, 2.0655, 2.3770, 2.1893, 0.7752,
+         0.0000, 2.2428, 0.8653, 1.9391, 2.0437, 1.9256, 1.7771],
+        [0.4281, 2.2422, 2.2380, 0.4570, 0.5350, 0.5940, 0.4458, 0.3604, 2.1487,
+         2.2428, 0.0000, 2.0942, 1.9394, 1.2776, 1.9247, 1.7308],
+        [2.3426, 0.8554, 0.9781, 2.1056, 2.0010, 1.9980, 2.2970, 2.2077, 1.0732,
+         0.8653, 2.0942, 0.0000, 1.7591, 1.9627, 1.7197, 1.6082],
+        [1.9052, 1.8910, 1.9269, 1.8539, 2.0479, 1.9350, 1.9287, 2.0251, 2.0642,
+         1.9391, 1.9394, 1.7591, 0.0000, 2.0503, 0.2468, 0.7601],
+        [1.2215, 2.0274, 1.9956, 1.3191, 1.2902, 1.3600, 1.2169, 1.1927, 1.9611,
+         2.0437, 1.2776, 1.9627, 2.0503, 0.0000, 1.9994, 1.9585],
+        [1.8389, 1.8916, 1.9095, 1.8128, 1.8843, 1.8089, 1.8294, 1.9728, 2.0047,
+         1.9256, 1.9247, 1.7197, 0.2468, 1.9994, 0.0000, 0.7398],
+        [1.8622, 1.7878, 1.8263, 1.7778, 1.7488, 1.7575, 1.8468, 1.8558, 2.0011,
+         1.7771, 1.7308, 1.6082, 0.7601, 1.9585, 0.7398, 0.0000]],
+       device='cuda:0')
+
+
+
+
plt.imshow(((embs.unsqueeze(1) - embs.unsqueeze(0))**2).sum(-1).cpu())
+
+
+
+

+
+
+
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/2A. Speaker Embeddings_files/figure-html/cell-9-output-1.png b/2A. Speaker Embeddings_files/figure-html/cell-9-output-1.png new file mode 100644 index 0000000..d3e6190 Binary files /dev/null and b/2A. Speaker Embeddings_files/figure-html/cell-9-output-1.png differ diff --git a/2A. Whisper quantization dataset preparation.html b/2A. Whisper quantization dataset preparation.html new file mode 100644 index 0000000..177dac5 --- /dev/null +++ b/2A. Whisper quantization dataset preparation.html @@ -0,0 +1,1342 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

Precompute Whisper transcriptions for VQ bottleneck distillation

+

Doing transcription means sampling from the Whisper auto-regressive decoder. This is too slow to do for each training batch. Fortunately the transcriptions are small text snippets so we can precompute them once for the whole dataset.

+

We use segments from Voice Activity Detection to reduce any boundary issues, then we use webdataset to yield multiple chunks from a FLAC file we only load once. The VAD segments are merged into longer chunks to make Whisper processing more efficient (it always processes 30s at a time).

+

Usage:

+
python -m whisperspeech.wh_transcribe librilight-large-wo6454-flac-000002.tar
+

You can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt.

+
+
+
+
The autoreload extension is already loaded. To reload it, use:
+  %reload_ext autoreload
+
+
+
+
import pylab as plt
+import IPython
+
+
+
flac_url = 'https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-small-flac-000000.tar'
+
+
+
flac_url = './librilight-small-flac-000000.tar'
+
+
+
+

Merge VAD segments into longer chunks

+
+
+# load some VAD outputs
+ds = wds.WebDataset(
+    vad.flac_to_vad_name(flac_url)
+).decode().to_tuple('vad.npy')
+chunks = [x[0] for x in progress_bar(ds, total='noinfer')]
+
+ + +
+
+ +
+ + 100.00% [335/335 00:00<00:00] +
+ +
+
+
+
# quick test
+len(chunks[0]), len(chunk_merger(chunks[0]))
+
+
(46, 28)
+
+
+
+
plt.hist([te-ts for x in chunks for ts,te in x])
+plt.title('Segment length distribution straight out of the VAD algorithm');
+
+
+
+

+
+
+
+
+
+
plt.hist([te-ts for x in chunks for ts,te in chunk_merger(x)]);
+plt.title('Chunk length distribution after greedy merging');
+
+
+
+

+
+
+
+
+
+
(np.array([te-ts for x in chunks for ts,te in chunk_merger(x)]) < 10).mean()
+
+
0.03671825647504738
+
+
+

In the above distribution only 3.7% of the samples have < 10 seconds. We noticed that this limits the ability of the T2S model to generate short sequences reliably.

+

It does not seem to matter for quantizing Whisper so we can keep this distribution (it uses less compute for training).

+

For T2S we can add some more shorter chunks at random:

+
+
plt.hist([te-ts for x in chunks for ts,te in chunk_merger(x, random_cutter)])
+plt.title('Chunk length distribution after randomized merging');
+
+
+
+

+
+
+
+
+
+
+

Merge the FLAC and VAD datasets

+

First we want to merge the VAD dataset with the FLAC audio data.

+
+
ds = wds_compose(vad.load_dataset(flac_url),
+    merge_in(wds.WebDataset(vad.flac_to_vad_name(flac_url)).decode())
+)
+
+
+
for s in ds: break
+s # notice the 'vad.npy' values that was missing from the FLAC dataset
+
+
{'__key__': 'small/100/sea_fairies_0812_librivox_64kb_mp3/01_baum_sea_fairies_64kb',
+ '__url__': 'librilight-small-vad-000000.tar.gz',
+ 'flac': (tensor([[0., 0., 0.,  ..., 0., 0., 0.]]), 16000),
+ 'json': {'speaker': '100',
+  'book_meta': {'id': '2315',
+   'title': 'Sea Fairies',
+   'description': "<p>In 1910, Baum hoped to end the Oz series and follow with a new series about a little girl named Trot and her sailor companion, Cap'n Bill. The Sea Fairies (1911) was the first book in the projected series and took Trot and Cap'n Bill under the sea where they had adventures with mermaids and other fantastic creatures. It was followed by Sky Island (1912) and then Baum returned to the Oz titles. He brought Trot and Cap'n Bill to Oz in the Scarecrow of Oz (1915). (Summary by Judy Bieber)</p>",
+   'url_text_source': 'http://www.gutenberg.org/etext/4358',
+   'language': 'English',
+   'copyright_year': '1911',
+   'num_sections': '22',
+   'url_rss': 'https://librivox.org/rss/2315',
+   'url_zip_file': 'http://www.archive.org/download/sea_fairies_0812_librivox/sea_fairies_0812_librivox_64kb_mp3.zip',
+   'url_project': 'http://en.wikipedia.org/wiki/The_Sea_Fairies',
+   'url_librivox': 'https://librivox.org/the-sea-fairies-by-l-frank-baum/',
+   'url_other': None,
+   'totaltimesecs': 15311,
+   'authors': [{'id': '406',
+     'first_name': 'L. Frank',
+     'last_name': 'Baum',
+     'dob': '1856',
+     'dod': '1919'}],
+   'genre': ['Action & Adventure'],
+   'Dramatic Readings': False,
+   'meta_genre': 'Literature'},
+  'snr': 11.4471,
+  'voice_activity': [[1.52, 11.2],
+   [11.84, 14.08],
+   [15.12, 35.76],
+   [36.32, 55.6],
+   [56.24, 70.48],
+   [71.28, 79.52],
+   [80.08, 89.76],
+   [90.24, 97.52],
+   [98.0, 101.28],
+   [102.8, 124.88],
+   [125.36, 133.12],
+   [133.68, 154.16],
+   [154.64, 177.2],
+   [178.0, 196.96],
+   [197.68, 211.44],
+   [212.32, 216.32],
+   [216.96, 243.52],
+   [244.0, 250.72],
+   [251.52, 268.32],
+   [268.96, 308.56],
+   [309.04, 315.28],
+   [316.0, 317.36],
+   [317.92, 325.44],
+   [326.24, 343.6],
+   [344.08, 350.32],
+   [350.88, 356.64],
+   [357.2, 363.2],
+   [363.76, 365.2],
+   [365.2, 373.2],
+   [373.84, 392.0],
+   [392.56, 401.04],
+   [401.6, 456.96],
+   [457.68, 501.92],
+   [502.4, 531.04],
+   [531.6, 554.48],
+   [554.96, 568.32],
+   [568.96, 585.84],
+   [587.04, 588.48],
+   [597.12, 597.92]]},
+ 'vad.npy': array([[  1.764,   6.49 ],
+        [  6.773,  11.18 ],
+        [ 11.98 ,  14.03 ],
+        [ 15.31 ,  36.3  ],
+        [ 36.3  ,  56.06 ],
+        [ 56.4  ,  70.6  ],
+        [ 71.4  , 101.2  ],
+        [102.75 , 103.56 ],
+        [103.7  , 121.75 ],
+        [122.06 , 125.   ],
+        [125.44 , 133.4  ],
+        [133.8  , 154.6  ],
+        [154.6  , 177.6  ],
+        [178.1  , 197.2  ],
+        [197.9  , 212.1  ],
+        [212.5  , 222.5  ],
+        [222.8  , 243.6  ],
+        [244.2  , 246.5  ],
+        [246.8  , 251.1  ],
+        [251.5  , 256.2  ],
+        [256.5  , 257.8  ],
+        [258.2  , 259.8  ],
+        [259.8  , 268.5  ],
+        [269.2  , 289.8  ],
+        [289.8  , 315.8  ],
+        [316.   , 317.2  ],
+        [318.   , 319.   ],
+        [319.8  , 344.   ],
+        [344.2  , 350.2  ],
+        [351.   , 352.5  ],
+        [353.   , 356.8  ],
+        [357.5  , 373.5  ],
+        [374.   , 388.   ],
+        [388.2  , 397.2  ],
+        [397.5  , 401.5  ],
+        [401.8  , 423.5  ],
+        [423.5  , 448.   ],
+        [448.   , 457.2  ],
+        [457.8  , 460.8  ],
+        [461.   , 477.8  ],
+        [478.5  , 502.2  ],
+        [502.2  , 527.5  ],
+        [527.5  , 550.5  ],
+        [550.5  , 576.5  ],
+        [577.   , 586.   ],
+        [587.5  , 588.5  ]], dtype=float16)}
+
+
+
+
+

Split the audio into chunks

+

After we merge the datasets and chunk the segments we can split each audio file into individual samples and pad them to 30s.

+
+
split_ds = wds_compose(ds,
+   wds.map_dict(**{"vad.npy":chunk_merger}),
+   split_to_chunks,
+   utils.resampler(16000, 'samples_16k')
+)
+
+
+
for s in split_ds: break
+s
+
+
{'__key__': './dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000',
+ '__url__': '../wolnelektury-preproc-wds/wolnelektury-vad-000001.tar.gz',
+ 'i': 0,
+ 'imax': 115,
+ 'tstart': 0.00844,
+ 'tend': 10.06,
+ 'total_seconds': 2776.057029478458,
+ 'lpad': 0,
+ 'rpad': 879616,
+ 'lpad_s': 0.0,
+ 'rpad_s': 19.9459410430839,
+ 'samples': tensor([ 1.8147e-05, -4.9754e-06, -1.3190e-05,  ...,  0.0000e+00,
+          0.0000e+00,  0.0000e+00]),
+ 'sample_rate': 44100,
+ 'samples_16k': tensor([ 4.3992e-06,  9.4182e-07, -1.3307e-06,  ...,  0.0000e+00,
+          0.0000e+00,  0.0000e+00])}
+
+
+
+
IPython.display.display(IPython.display.Audio(s['samples_16k'], rate=16000))
+
+ + + +
+
+
+
+

Transcribe

+
+
whmodel = whisper.load_model('base.en')
+decoding_options = whisper.DecodingOptions(language='en')
+
+
+
output = flac_url.rsplit("/", 1)[1].replace('flac', 'txt') + ".gz"
+with wds.TarWriter(output) as sink:
+    for s in progress_bar(split_ds, total=256):
+        mel = whisper.log_mel_spectrogram(s['samples'].unsqueeze(0).cuda())
+        embs = whmodel.encoder(mel)
+        decs = whmodel.decode(embs, decoding_options)
+
+        sink.write({
+            "__key__": s['__key__'],
+            "txt": decs[0].text,
+        })
+
+ + +
+
+ +
+ + 100.00% [256/256 00:59<00:00] +
+ +
+
+
+
+

Transcribe in batches

+

We have one more thing to add – batch processing makes the transcription quite a bit faster (bs=16 brings a 4.5x speedup).

+
+
batched_ds = wds_compose(split_ds,
+    wds.to_tuple('__key__', 'samples'),
+    wds.batched(16),
+)
+
+
+
+

Verify the transcripts and the chunks work together

+
+
txt_ds = wds_compose(split_ds,
+    merge_in(wds.WebDataset('../wolnelektury-preproc-wds/'+flac_url.rsplit("/", 1)[1].replace('flac', 'txt') + ".gz").decode())
+)
+
+
+
for x in txt_ds: break
+x
+
+
{'__key__': './dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000',
+ '__url__': '../wolnelektury-preproc-wds/wolnelektury-raw-000001.tar.gz',
+ 'i': 0,
+ 'imax': 115,
+ 'tstart': 0.00844,
+ 'tend': 10.06,
+ 'total_seconds': 2776.057029478458,
+ 'lpad': 0,
+ 'rpad': 879616,
+ 'lpad_s': 0.0,
+ 'rpad_s': 19.9459410430839,
+ 'samples': tensor([ 1.8147e-05, -4.9754e-06, -1.3190e-05,  ...,  0.0000e+00,
+          0.0000e+00,  0.0000e+00]),
+ 'sample_rate': 44100,
+ 'samples_16k': tensor([ 4.3992e-06,  9.4182e-07, -1.3307e-06,  ...,  0.0000e+00,
+          0.0000e+00,  0.0000e+00]),
+ 'txt': 'Rozdział 22. Stare mięśca, nowi ludzie. Stierfort i ja zabawiliśmy dwa tygodnie w tamtej okolicy.'}
+
+
+
+
for x in progress_bar(txt_ds, total=10):
+    IPython.display.display(IPython.display.Markdown(f"#### {x['__key__']} chunk {x['i']} of {x['imax']}"))
+    fname = f"test-{x['i']}.ogg"
+    torchaudio.save(fname, x['samples'][None,:int((x['tend']-x['tstart'])*s['sample_rate'])], s['sample_rate'])
+    IPython.display.display(IPython.display.Audio(url=fname, rate=x['sample_rate']))
+    IPython.display.display(IPython.display.Markdown(x['txt']))
+
+ + +
+
+ +
+ + 100.00% [10/10 00:02<00:00] +
+ +
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000 chunk 0 of 115

+
+
+ + + +
+
+

Rozdział 22. Stare mięśca, nowi ludzie. Stierfort i ja zabawiliśmy dwa tygodnie w tamtej okolicy.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_001 chunk 1 of 115

+
+
+ + + +
+
+

Byliśmy prawie ciągle razem. Czasem tylko rozstawaliśmy się na kilka godzin. Styrford, bowiem, był zawołanym żeglarzem. Ja zaś nie smakowałem zbytnia o w tego rodzaju rozrywkach. To też gdy przyjaciel mój puszczał się w towarzystwie pana PegoTi na morze, pozostawałem zwykle na lądzie. Korzystanie z pokoiku u PegoTi krępowało mnie nieco.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_002 chunk 2 of 115

+
+
+ + + +
+
+

Wiedząc, jak dalece przez dzień cały jest zajęta do oglądaniem chorego męża, wracałem wcześniej bieczorem, gdy Steelfort, będąc panem swego czasu, niczym się nie krempował. To wiedziałem się też, że gdy już spał w najlepsze, on podejmował rybaków w ulubionej winiarni pana PegoT pod dobrą chęcią, lub wyrybaczkim odzieniu spędzał całe księżycowe nocy na morzu.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_003 chunk 3 of 115

+
+
+ + + +
+
+

Wiedziałem, że jego żywa gorąca natura potrzebuje rozrywek i niebezpieczeństw i postępowanie jego wcale mnie nie dziwiło. Rostawaliśmy się i z tego jeszcze powodu, że Stirforta nie mogły pociągać tak jak mnie wycieczki do Blanderstone. To też czasem, że gnaliśmy się po wczesnym śniadaniu, a schodzi i dopiero późno na obiad. Nie miałem pojęcia co robił, czym się wówczas zajmował.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_004 chunk 4 of 115

+
+
+ + + +
+
+

Wiedziałem tylko, że znany był i lubiany przez wszystkich, a posiadał darż, szczególny wynajdwanie rozrywek i zajęcia nawet tam, gdzie inny na jego miejscu nic będzie nie znalazł. Co do mnie przebiegając drogę do Blanderstom, przeżywałem w pamięci każdy dzień z przeszłości i to mi wypełniało myśl i serce. Przypominało mi się każde niegdyś tu przeżyte wrażenie.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_005 chunk 5 of 115

+
+
+ + + +
+
+

nadmogiło pod drzewami, gdzie spoczywali moi rodzice, zamienioną przez pegoty w kwietnik, na którą gdybyła tylko jeszcze miejscem wiecznego z poczynku megajca, spoglądałem z takim żalem, i którą widział otwartą na przyjęcie z włog, mojej pięknej, kochanej matki jej dzieciątka, długie, spędzałem godziny. Lężała ona na uboczu, wrogu cmentarze.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_006 chunk 6 of 115

+
+
+ + + +
+
+

Przechodząc z drogą, czytać mogłem wypisane na kamieniu nazwiska, a dzwonkościelny zdawał się być głosem pożegnania. W godzinach tych, myśląc o nich.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_007 chunk 7 of 115

+
+
+ + + +
+
+

Myślałem zarazem o tym zawsze, jakie miejsce zajmę w życiu, jakich wielkich lub dobrych dokonam czynów. Ech okroków moich nie odbijało nud innych, te tylko, jak gdybym krocząc, ubokużyjącej jeszcze matki, nad powietrzne budował zamki. Stary dom nasz zmienił się niedopoznania.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_008 chunk 8 of 115

+
+
+ + + +
+
+

Znikły bez śladu, powiżone przez wrony dawno opuszczone gniazda, adrzewa, strzyżone i ścinane utraciły, dawny kształt, ogród zdziczał, połowa okien była zabita. Dom zamieszkany został przez jakiegoś chorego umysłowo-gentelmena ich, tych, co go doglądali.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_009 chunk 9 of 115

+
+
+ + + +
+
+

Chory przesiadawał zwykle w okniem, i niech ktoś pokójku i spoglądał na cmentarz. Ciekawy byłem, czy też jego myśli, kreślą te same obrazy, co moje, gdy w czeznym rankiem, w nocnej koszulce wyglądałem tym okienkiem, witając pasące się o wschodzie słońca trzody. Dawni nasi sąsiedzi, Państwu Grraper,

+
+
+
+
for x in progress_bar(txt_ds, total=10):
+    IPython.display.display(IPython.display.Markdown(f"#### {x['__key__']} chunk {x['i']} of {x['imax']}"))
+    fname = f"test-{x['i']}.ogg"
+    torchaudio.save(fname, x['samples'][None,:int((x['tend']-x['tstart'])*s['sample_rate'])], s['sample_rate'])
+    IPython.display.display(IPython.display.Audio(url=fname, rate=x['sample_rate']))
+    IPython.display.display(IPython.display.Markdown(x['txt']))
+
+ + +
+
+ +
+ + 100.00% [10/10 00:02<00:00] +
+ +
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000 chunk 0 of 115

+
+
+ + + +
+
+

Rozdział dwudziesty drugi Stare miejsca, nowi ludzie Styrford i ja zabawiliśmy dwa tygodnie w tamtej okolicy.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_001 chunk 1 of 115

+
+
+ + + +
+
+

Byliśmy prawie ciągle razem. Czasem tylko rozstawaliśmy się na kilka godzin. Styrford bowiem był zawołanym żeglarzem, ja zaś nie smakowałem zbytnio w tego rodzaju rozrywkach. Toteż gdy przyjaciel mój puszczał się w towarzystwie pana Pegoty na morze, pozostawałem zwykle na lądzie. Korzystanie z pokoiku u Pegoty krępowało mnie nieco.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_002 chunk 2 of 115

+
+
+ + + +
+
+

Wiedząc, jak dalece przez dzień cały jest zajęta doglądaniem chorego męża, wracałem wcześniej wieczorem, gdy Stilford, będąc panem swego czasu, niczym się nie krępował. Dowiedziałem się też, że gdybym już spał w najlepsze, on podejmował rybaków w ulubionej winiarni pana Pegoty pod dobrą chęcią lub w rybackim odzieniu spędzał całe księżycowe noce na morzu.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_003 chunk 3 of 115

+
+
+ + + +
+
+

Wiedziałem, że jego żywa, gorąca natura potrzebuje rozrywek i niebezpieczeństw i postępowanie jego wcale mnie nie dziwiło. Rozstawaliśmy się i z tego jeszcze powodu, że z Tyrforda nie mogły pociągać, tak jak mnie, wycieczki do Blunderstown. To też czasem żegnaliśmy się po wczesnym śniadaniu, a schodzili dopiero późno na obiad. Nie miałem pojęcia, co robił, czym się wówczas zajmował.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_004 chunk 4 of 115

+
+
+ + + +
+
+

Wiedziałem tylko, że znany był i lubiany przez wszystkich, a posiadał dar szczególny wynajdywanie rozrywek i zajęcia nawet tam, gdzie inny na jego miejscu nic by nie znalazł. Co do mnie, przebiegając drogę do Blunderstone, przeżywałem w pamięci każdy dzień z przeszłości i to mi wypełniało myśl i serce. Przypominało mi się każde niegdyś tu przeżyte wrażenie.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_005 chunk 5 of 115

+
+
+ + + +
+
+

Nad mogiłą pod drzewami, gdzie spoczywali moi rodzice, zamienioną przez pegot i w kwietnik, na którą, gdy była tylko jeszcze miejscem wiecznego spoczynku mego ojca, spoglądałem z takim żalem i którą widział otwartą na przyjęcie zwłok mojej pięknej, kochanej matki i jej dzieciątka. Długie spędzałem godziny. Leżała ona na uboczu, w rogu cmentarza.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_006 chunk 6 of 115

+
+
+ + + +
+
+

Przechodząc drogą, czytać mogłem wypisane na kamieniu nazwiska, a dzwoń kościelny zdawał się być głosem pożegnania. W godzinach tych, myśląc o nich…

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_007 chunk 7 of 115

+
+
+ + + +
+
+

Myślałem zarazem o tym zawsze, jakie miejsce zajmę w życiu, jakich wielkich lub dobrych dokonam czynów. Echo kroków moich nie odbijało nut innych, te tylko, jak gdybym krocząc u boku żyjącej jeszcze matki nadpowietrzne budował zamki. Stary dom nasz zmienił się nie do poznania.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_008 chunk 8 of 115

+
+
+ + + +
+
+

Znikły bez śladu powichrzone przez wrony dawno opuszczone gniazda, a drzewa strzyżone i ścinane utraciły dawny kształt. Ogród zdziczał. Połowa okien była zabita. Dom zamieszkany został przez jakiegoś chorego umysłowo dżentelmena i tych, co go doglądali.

+
+
+

./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_009 chunk 9 of 115

+
+
+ + + +
+
+

Chory przesiadywał zwykle w oknie mego niegdyś pokoiku i spoglądał na cmentarz. Ciekawy byłem, czy też jego myśli kreślą te same obrazy co moje, gdy wczesnym rankiem w nocnej koszulce wyglądałem tym okienkiem, witając pasące się o wschodzie słońca trzody. Dawni nasi sąsiedzi, państwo Graper.

+
+
+
+
+

Batch processing

+

Let’s put everything above together.

+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/2A. Whisper quantization dataset preparation_files/figure-html/cell-11-output-1.png b/2A. Whisper quantization dataset preparation_files/figure-html/cell-11-output-1.png new file mode 100644 index 0000000..8ff5f27 Binary files /dev/null and b/2A. Whisper quantization dataset preparation_files/figure-html/cell-11-output-1.png differ diff --git a/2A. Whisper quantization dataset preparation_files/figure-html/cell-8-output-1.png b/2A. Whisper quantization dataset preparation_files/figure-html/cell-8-output-1.png new file mode 100644 index 0000000..72543dc Binary files /dev/null and b/2A. Whisper quantization dataset preparation_files/figure-html/cell-8-output-1.png differ diff --git a/2A. Whisper quantization dataset preparation_files/figure-html/cell-9-output-1.png b/2A. Whisper quantization dataset preparation_files/figure-html/cell-9-output-1.png new file mode 100644 index 0000000..8a2b642 Binary files /dev/null and b/2A. Whisper quantization dataset preparation_files/figure-html/cell-9-output-1.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-27-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-27-output-7.png new file mode 100644 index 0000000..c83ffa1 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-27-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-28-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-28-output-7.png new file mode 100644 index 0000000..9a6cbde Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-28-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-29-output-6.png b/2B. 
Whisper quantization (semantic token) model_files/figure-html/cell-29-output-6.png new file mode 100644 index 0000000..5f54be3 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-29-output-6.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-30-output-6.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-30-output-6.png new file mode 100644 index 0000000..10b828f Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-30-output-6.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-31-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-31-output-7.png new file mode 100644 index 0000000..cedb6d0 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-31-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-32-output-6.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-32-output-6.png new file mode 100644 index 0000000..e37d587 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-32-output-6.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-33-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-33-output-7.png new file mode 100644 index 0000000..eaaa29c Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-33-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-34-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-34-output-7.png new file mode 100644 index 0000000..92df301 Binary files /dev/null and b/2B. 
Whisper quantization (semantic token) model_files/figure-html/cell-34-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-35-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-35-output-7.png new file mode 100644 index 0000000..a062430 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-35-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-36-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-36-output-7.png new file mode 100644 index 0000000..fe704ce Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-36-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-37-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-37-output-7.png new file mode 100644 index 0000000..a386ac9 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-37-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-38-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-38-output-7.png new file mode 100644 index 0000000..6d2ca53 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-38-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-39-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-39-output-7.png new file mode 100644 index 0000000..9171bbc Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-39-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-40-output-7.png b/2B. 
Whisper quantization (semantic token) model_files/figure-html/cell-40-output-7.png new file mode 100644 index 0000000..10ae41b Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-40-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-41-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-41-output-7.png new file mode 100644 index 0000000..ed92a40 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-41-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-42-output-5.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-42-output-5.png new file mode 100644 index 0000000..ff85fee Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-42-output-5.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-43-output-5.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-43-output-5.png new file mode 100644 index 0000000..ba1954a Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-43-output-5.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-44-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-44-output-7.png new file mode 100644 index 0000000..1e3d77e Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-44-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-45-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-45-output-7.png new file mode 100644 index 0000000..32c8741 Binary files /dev/null and b/2B. 
Whisper quantization (semantic token) model_files/figure-html/cell-45-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-46-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-46-output-7.png new file mode 100644 index 0000000..47fc06e Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-46-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-47-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-47-output-7.png new file mode 100644 index 0000000..3cf69cd Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-47-output-7.png differ diff --git a/2B. Whisper quantization (semantic token) model_files/figure-html/cell-48-output-7.png b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-48-output-7.png new file mode 100644 index 0000000..a4e6af4 Binary files /dev/null and b/2B. Whisper quantization (semantic token) model_files/figure-html/cell-48-output-7.png differ diff --git a/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-57-output-1.png b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-57-output-1.png new file mode 100644 index 0000000..e1f21fc Binary files /dev/null and b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-57-output-1.png differ diff --git a/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-64-output-1.png b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-64-output-1.png new file mode 100644 index 0000000..92b32eb Binary files /dev/null and b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-64-output-1.png differ diff --git a/2C. 
Whisper quantization (semantic token) evaluation_files/figure-html/cell-67-output-1.png b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-67-output-1.png new file mode 100644 index 0000000..802b9af Binary files /dev/null and b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-67-output-1.png differ diff --git a/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-68-output-1.png b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-68-output-1.png new file mode 100644 index 0000000..6c4fffb Binary files /dev/null and b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-68-output-1.png differ diff --git a/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-69-output-1.png b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-69-output-1.png new file mode 100644 index 0000000..0e54867 Binary files /dev/null and b/2C. Whisper quantization (semantic token) evaluation_files/figure-html/cell-69-output-1.png differ diff --git a/2b. whisper quantization (semantic token) model.html b/2b. whisper quantization (semantic token) model.html new file mode 100644 index 0000000..7cb70d7 --- /dev/null +++ b/2b. whisper quantization (semantic token) model.html @@ -0,0 +1,4820 @@ + + + + + + + + + +WhisperSpeech - Distill Whisper with a VQ bottleneck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Distill Whisper with a VQ bottleneck

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
from whisperspeech import wh_transcribe
+import IPython
+
+
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/pyannote/audio/core/io.py:43: UserWarning: torchaudio._backend.set_audio_backend has been deprecated. With dispatcher enabled, this function is no-op. You can remove the function call.
+  torchaudio.set_audio_backend("soundfile")
+torchvision is not available - cannot save figures
+
+
+
+

Prepare the dataset

+
+
shards = [str(x) for x in Path('/data/whisperspeech-wds/').glob('librilight-*.tar')]
+
+
+
ds = wds.WebDataset(shards, shardshuffle=True)
+
+
+
ds2 = ds.compose(
+    wds.decode(wds.torch_audio),
+    utils.find_audio,
+    merge_in(derived_dataset('/data/whisperspeech-processed-wds/', 'vad')),
+    wds.map_dict(**{"vad.npy":wh_transcribe.chunk_merger}),
+    wh_transcribe.split_to_chunks,
+    merge_in(derived_dataset('/data/whisperspeech-processed-wds/', 'base.en-txt')),
+    wds.shuffle(),
+    wds.select(lambda x: x['i'] != 0 and x['i'] != x['imax']),
+)
+
+
+
vad_shards = [str(x) for x in Path('/data/whisperspeech-processed-wds/').glob('*-large-6454-vad-*.tar.gz')]
+
+
+
ds = wds.WebDataset(vad_shards).decode().map_dict(**{'vad.npy':wh_transcribe.chunk_merger})
+
+
+
chunks = [len(x['vad.npy'][1:-1]) for x in progress_bar(ds, total='noinfer')]
+
+ + +
+
+ +
+ + 100.00% [3411/3411 00:01<00:00] +
+ +
+
+
+
sum(chunks)
+
+
203078
+
+
+
+
for x in progress_bar(ds2, total=5):
+    IPython.display.display(IPython.display.Markdown(f"## {x['__key__']} from {x['__url__']}\n{x['txt']}"))
+    IPython.display.display(IPython.display.Audio(x['samples'], rate=16000))
+
+ + +
+
+ +
+ + 100.00% [5/5 00:01<00:00] +
+ +
+
+

large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_006 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar

+

Physically I was incapable of complying with the command, and mentally I had not the slightest intention of departing. In an outhouse devoted to storing melees, sheepskins, and harness, an old man was sitting on the doorstep, compounding a mixture which I recognized as a sheep remedy.

+
+
+ + + +
+
+

large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_009 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar

+

The following day I was the most surprised man in South Africa when I learned that my preparation was working a marvelous cure. I was invited to remain with the bore the balance of the season as an honoured guest. Day after day I tramped the hills, returning at night as wise and as rich as when I set out. There were unmistakable indications that gold should be found in the vicinity, but the stubborn fact remained that I could not find it.

+
+
+ + + +
+
+

large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_001 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar

+

I was one of the first prospectors in the Transvaal to search for gold and a precious dance it led me. At that time, but few Englishmen had ventured into the Boer country, and such was the jealousy with which they were regarded that it was impossible to secure any information which would assist in the search. Footsoir and weary, I tramped from farm to farm, content

+
+
+ + + +
+
+

large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_032 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar

+

Dead, more than twenty years. In fact, before I was married and came to live here, for he was my husband’s father. Did you know him? Yes, but I was only a little girl at the time. Why have the clothes been kept?

+
+
+ + + +
+
+

large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_004 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar

+

Fortunately, I had acquired some knowledge of sheep in Australia else I believe that I should have starved. When all else failed, I became a sheep doctor and then did a compound whose virtues would have done credit to the most widely advertised path and medicine nostrum.

+
+
+ + + +
+
+
+
ds3 = ds2.compose(
+    add_masks,
+    tokenize_text,
+    wds.to_tuple('samples', 'mask', 'in_ttoks', 'out_ttoks')
+)
+
+
+
for x in ds3: break
+x
+
+
(tensor([0.0043, 0.0102, 0.0163,  ..., 0.0000, 0.0000, 0.0000]),
+ tensor([ True,  True,  True,  ..., False, False, False]),
+ tensor([50257,  3152,   257, 44823,  3154,  1589,    11,   484,   673,  1144,
+           572,   503,   286,  2837,   290,   706,  2063,   281,  1711,   338,
+          1057,    11,   262, 39535, 21067,   373,   625,   262,  2318,   290,
+           287,  5897, 10150,    13,  1119,  2582, 40424,   510,   262, 27913,
+          4608,   284, 47251,   290,  1043,   257,  1588,  1426,   325,   286,
+          4684, 13384,  3492,   284, 17655,   511, 15892,    13, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,
+         50256]),
+ tensor([ 3152,   257, 44823,  3154,  1589,    11,   484,   673,  1144,   572,
+           503,   286,  2837,   290,   706,  2063,   281,  1711,   338,  1057,
+            11,   262, 39535, 21067,   373,   625,   262,  2318,   290,   287,
+          5897, 10150,    13,  1119,  2582, 40424,   510,   262, 27913,  4608,
+           284, 47251,   290,  1043,   257,  1588,  1426,   325,   286,  4684,
+         13384,  3492,   284, 17655,   511, 15892,    13, 50256,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100]))
+
+
+
+
ds3 = ds2.compose(
+    add_masks,
+    lambda x: tokenize_text(x, model='medium', language='en'),
+    wds.to_tuple('samples', 'mask', 'in_ttoks', 'out_ttoks')
+)
+
+
+
for x in ds3: break
+x
+
+
(tensor([0.0013, 0.0010, 0.0011,  ..., 0.0000, 0.0000, 0.0000]),
+ tensor([ True,  True,  True,  ..., False, False, False]),
+ tensor([50258, 50259, 50359,    32,  1326,  1270,  3931,   382,   613,    11,
+         11672,   293, 37632, 13809,    11,   576,  1319,   264,  1851,   295,
+           264,  1002,    11,   293,  1939,   576,   572,   544,  1643,   281,
+         18071,   264,  1164,   295,  3687,    11,   420,  1497,   554,  1952,
+          6018,    11,   813,   264,  1974,  5010,   295,   721,    11,   689,
+           264,  7700,   366,  4054,   293,  7006,   293, 14154,   292,    13,
+          2188,  1359, 17431,  2212,   281,  3511,   328,  3780,   311,  3567,
+           294,   702,  1536,  6717,  1062,   362, 16424,   796,   666,   257,
+          5403, 14763,    13, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,
+         50257, 50257, 50257]),
+ tensor([50259, 50359,    32,  1326,  1270,  3931,   382,   613,    11, 11672,
+           293, 37632, 13809,    11,   576,  1319,   264,  1851,   295,   264,
+          1002,    11,   293,  1939,   576,   572,   544,  1643,   281, 18071,
+           264,  1164,   295,  3687,    11,   420,  1497,   554,  1952,  6018,
+            11,   813,   264,  1974,  5010,   295,   721,    11,   689,   264,
+          7700,   366,  4054,   293,  7006,   293, 14154,   292,    13,  2188,
+          1359, 17431,  2212,   281,  3511,   328,  3780,   311,  3567,   294,
+           702,  1536,  6717,  1062,   362, 16424,   796,   666,   257,  5403,
+         14763,    13, 50257,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,  -100,
+          -100,  -100,  -100]))
+
+
+
+
train_ds = load_dataset('librilight-wds/librilight-small-flac-000000-s0*.tar', 'librilight-preproc-wds/', samples=2500 * 32)
+
+
+
val_ds = load_dataset('librilight-wds/librilight-small-flac-000000-s11.tar', 'librilight-preproc-wds/', samples=500)
+
+
+
for x in progress_bar(wds.WebLoader(train_ds, num_workers=16, batch_size=None), total='noinfer'): pass
+
+ + +
+
+ +
+ + [245/? 00:09<?] +
+ +
+
+
╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮
+ in <module>:1                                                                                    
+                                                                                                  
+ 1 for x in progress_bar(wds.WebLoader(train_ds, num_workers=16, batch_size=None), total='n     
+   2                                                                                              
+                                                                                                  
+ /opt/conda/lib/python3.10/site-packages/fastprogress/fastprogress.py:41 in __iter__              
+                                                                                                  
+    38 def __iter__(self):                                                                    
+    39 │   │   if self.total != 0: self.update(0)                                                 
+    40 │   │   try:                                                                               
+  41 │   │   │   for i,o in enumerate(self.gen):                                                
+    42 │   │   │   │   if self.total and i >= self.total: break                                   
+    43 │   │   │   │   yield o                                                                    
+    44 │   │   │   │   self.update(i+1)                                                           
+                                                                                                  
+ /root/workspace/webdataset/webdataset/pipeline.py:64 in iterator                                 
+                                                                                                  
+    61 def iterator(self):                                                                    
+    62 │   │   """Create an iterator through the entire dataset, using the given number of repe   
+    63 │   │   for i in range(self.repetitions):                                                  
+  64 │   │   │   for sample in self.iterator1():                                                
+    65 │   │   │   │   yield sample                                                               
+    66                                                                                        
+    67 def __iter__(self):                                                                    
+                                                                                                  
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:633 in __next__           
+                                                                                                  
+    630 │   │   │   if self._sampler_iter is None:                                                
+    631 │   │   │   │   # TODO(https://github.com/pytorch/pytorch/issues/76750)                   
+    632 │   │   │   │   self._reset()  # type: ignore[call-arg]                                   
+  633 │   │   │   data = self._next_data()                                                      
+    634 │   │   │   self._num_yielded += 1                                                        
+    635 │   │   │   if self._dataset_kind == _DatasetKind.Iterable and \                          
+    636 │   │   │   │   │   self._IterableDataset_len_called is not None and \                    
+                                                                                                  
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1328 in _next_data        
+                                                                                                  
+   1325 │   │   │   │   return self._process_data(data)                                           
+   1326 │   │   │                                                                                 
+   1327 │   │   │   assert not self._shutdown and self._tasks_outstanding > 0                     
+ 1328 │   │   │   idx, data = self._get_data()                                                  
+   1329 │   │   │   self._tasks_outstanding -= 1                                                  
+   1330 │   │   │   if self._dataset_kind == _DatasetKind.Iterable:                               
+   1331 │   │   │   │   # Check for _IterableDatasetStopIteration                                 
+                                                                                                  
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1294 in _get_data         
+                                                                                                  
+   1291 │   │   │   # need to call `.task_done()` because we don't use `.join()`.                 
+   1292 │   │   else:                                                                             
+   1293 │   │   │   while True:                                                                   
+ 1294 │   │   │   │   success, data = self._try_get_data()                                      
+   1295 │   │   │   │   if success:                                                               
+   1296 │   │   │   │   │   return data                                                           
+   1297                                                                                           
+                                                                                                  
+ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1132 in _try_get_data     
+                                                                                                  
+   1129 │   │   # Returns a 2-tuple:                                                              
+   1130 │   │   #   (bool: whether successfully get data, any: data if successful else None)      
+   1131 │   │   try:                                                                              
+ 1132 │   │   │   data = self._data_queue.get(timeout=timeout)                                  
+   1133 │   │   │   return (True, data)                                                           
+   1134 │   │   except Exception as e:                                                            
+   1135 │   │   │   # At timeout and error, we manually check whether any worker has              
+                                                                                                  
+ /opt/conda/lib/python3.10/multiprocessing/queues.py:113 in get                                   
+                                                                                                  
+   110 │   │   │   try:                                                                           
+   111 │   │   │   │   if block:                                                                  
+   112 │   │   │   │   │   timeout = deadline - time.monotonic()                                  
+ 113 │   │   │   │   │   if not self._poll(timeout):                                            
+   114 │   │   │   │   │   │   raise Empty                                                        
+   115 │   │   │   │   elif not self._poll():                                                     
+   116 │   │   │   │   │   raise Empty                                                            
+                                                                                                  
+ /opt/conda/lib/python3.10/multiprocessing/connection.py:257 in poll                              
+                                                                                                  
+   254 │   │   """Whether there is any input available to be read"""                              
+   255 │   │   self._check_closed()                                                               
+   256 │   │   self._check_readable()                                                             
+ 257 │   │   return self._poll(timeout)                                                         
+   258                                                                                        
+   259 def __enter__(self):                                                                   
+   260 │   │   return self                                                                        
+                                                                                                  
+ /opt/conda/lib/python3.10/multiprocessing/connection.py:424 in _poll                             
+                                                                                                  
+   421 │   │   return self._recv(size)                                                            
+   422                                                                                        
+   423 def _poll(self, timeout):                                                              
+ 424 │   │   r = wait([self], timeout)                                                          
+   425 │   │   return bool(r)                                                                     
+   426                                                                                            
+   427                                                                                            
+                                                                                                  
+ /opt/conda/lib/python3.10/multiprocessing/connection.py:931 in wait                              
+                                                                                                  
+   928 │   │   │   │   deadline = time.monotonic() + timeout                                      
+   929 │   │   │                                                                                  
+   930 │   │   │   while True:                                                                    
+ 931 │   │   │   │   ready = selector.select(timeout)                                           
+   932 │   │   │   │   if ready:                                                                  
+   933 │   │   │   │   │   return [key.fileobj for (key, events) in ready]                        
+   934 │   │   │   │   else:                                                                      
+                                                                                                  
+ /opt/conda/lib/python3.10/selectors.py:416 in select                                             
+                                                                                                  
+   413 │   │   │   timeout = math.ceil(timeout * 1e3)                                             
+   414 │   │   ready = []                                                                         
+   415 │   │   try:                                                                               
+ 416 │   │   │   fd_event_list = self._selector.poll(timeout)                                   
+   417 │   │   except InterruptedError:                                                           
+   418 │   │   │   return ready                                                                   
+   419 │   │   for fd, event in fd_event_list:                                                    
+╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
+KeyboardInterrupt
+
+
+
+
+
for x in train_ds:
+    print(x[3])
+    break
+
+
tensor([[  464,  7664,   286,  ...,  -100,  -100,  -100],
+        [ 2953,   717,   612,  ...,  -100,  -100,  -100],
+        [25383,   339,   587,  ...,  -100,  -100,  -100],
+        ...,
+        [  392,   340,   880,  ...,  -100,  -100,  -100],
+        [  464, 31526, 11416,  ...,  -100,  -100,  -100],
+        [ 2202,   262, 16720,  ...,  -100,  -100,  -100]])
+
+
+
+
+

Training code

+
+

source

+
+

RQBottleneckTransformer

+
+
 RQBottleneckTransformer (vq_codes=512, q_depth=12, depth=1, n_head=2,
+                          head_width=64, ffn_mult=4, codebook_dim=2,
+                          threshold_ema_dead_code=2, use_cosine_sim=False,
+                          kl_loss_mul=1, downsample=1,
+                          whisper_model_name='tiny.en',
+                          tunables=Tunables(init_std=1.5,
+                          embeddings_std=0.045, embeddings_lr_scale=1,
+                          output_mult=1, query_mult=2, rope=True,
+                          mask_embs=True, downsample_conv=False,
+                          downsample_mean=True, codebook_dim=32,
+                          codebook_decay=0.9, lr0=0.0009,
+                          clip_gradient_norm=2, weight_decay=0.001,
+                          warmup_steps=850, random=False))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+
import pylab as plt
+import fastprogress
+import IPython
+import numpy as np
+
+class RQVisual:
+    def __init__ (self, model, masterbar, total_steps):
+        self.model = model
+        self.masterbar = masterbar
+        self.total_steps = total_steps
+        self.epochs = total_steps // masterbar.main_bar.total
+        
+        gs = plt.GridSpec(3, 1, height_ratios=[2,2,1])
+        graph_fig = plt.figure(figsize=(10,6))
+        self.graph_fig = graph_fig
+        self.loss_p = graph_fig.add_subplot(gs[0])
+        self.acc_p = graph_fig.add_subplot(gs[1], sharex=self.loss_p)
+        self.acc_p.tick_params('x', labelbottom=False)
+        self.lr_p = graph_fig.add_subplot(gs[2], sharex=self.loss_p)
+        self.lr_p.tick_params('x', labelbottom=False)
+        self.graph_out = None
+        
+        self.its = []
+        self.train_losses = []
+        self.val_losses = []
+        self.lr_history = []
+        self.entropy = np.nan
+        self.entropy_history = []
+            
+    def show(self):
+        self.start_t = time.time()
+        self.masterbar.write(["samples", "train", "val", "codebook entropy", "time"], table=True)
+        self.graph_out = display(self.graph_fig, display_id=True)
+        self.entropy_out = display(IPython.display.HTML(''), display_id=True)
+    
+    def hide(self):
+        if self.graph_out is not None:
+            self.graph_out.update(IPython.display.HTML(''))
+    
+    def plot(self):
+        loss_p, acc_p, lr_p = self.loss_p, self.acc_p, self.lr_p
+        loss_p.clear()
+        loss_p.plot(self.its, self.train_losses)
+        loss_p.plot(self.its, self.val_losses)
+        loss_p.set_xlim(10000, self.total_steps)
+        loss_p.set_xscale('log')
+        loss_p.set_yscale('log')
+        acc_p.clear()
+        acc_p.plot(self.its, np.stack(self.entropy_history), ':')
+        lr_p.clear()
+        lrs = np.array(self.lr_history)
+        lr_p.plot(self.its, lrs)
+        self.graph_out.update(self.graph_fig)
+    
+    def add_data(self, it, lr, train_loss, val_los):
+        self.its.append(it)
+        self.train_losses.append(train_loss)
+        self.val_losses.append(val_los)
+        self.lr_history.append(lr)
+        with torch.no_grad():
+            cls = vqmodel.rq.layers[0]._codebook.cluster_size
+            pdf = cls / cls.sum()
+            entropy = -torch.nansum(pdf * pdf.log2())
+        self.entropy_history.append(entropy.cpu().numpy())
+        self.entropy_out.update(f"Entropy: {self.entropy_history[-1]:.2f}")
+        self.model.reset_stats()
+        self.plot()
+
+    def add_table_row(self, it, avg_train_loss, val_loss):
+        elapsed_t = time.time() - self.start_t
+        self.masterbar.write([it, f"{avg_train_loss:.5f}", f"{val_loss:.5f}", f"{self.entropy_history[-1]:.2f}", fastprogress.core.format_time(elapsed_t)], table=True)
+    
+    def on_iter(self, bar, it, avg_train_loss, val_loss):
+        epoch = math.ceil(it / self.total_steps * self.epochs)
+        bar.comment = f"#{epoch}/{self.epochs} loss: {avg_train_loss:.3f} / {val_loss:.3f}"
+
+
+

source

+
+
+

make_model

+
+
 make_model (size:str, tunables:__main__.Tunables=Tunables(init_std=1.5,
+             embeddings_std=0.045, embeddings_lr_scale=1, output_mult=1,
+             query_mult=2, rope=True, mask_embs=True,
+             downsample_conv=False, downsample_mean=True, codebook_dim=32,
+             codebook_decay=0.9, lr0=0.0009, clip_gradient_norm=2,
+             weight_decay=0.001, warmup_steps=850, random=False),
+             dataset:torch.utils.data.dataset.Dataset=None)
+
+
+
# convert the final checkpoint
+model = make_model('base.en-2d-512c-dim64').load_checkpoint('vq_stoks-epoch=3-step=28582-val_loss=11.42.ckpt')
+model.save_model(f'vqmodel-512c-dim64-4e-hyptuned-32gpu.model')
+
+
tunables: Tunables(init_std=1.5, embeddings_std=0.045, embeddings_lr_scale=1, output_mult=1, query_mult=2, rope=True, mask_embs=True, downsample_conv=False, downsample_mean=True, codebook_dim=32, codebook_decay=0.9, lr0=0.0009, clip_gradient_norm=2, weight_decay=0.001, warmup_steps=850, random=False)
+
+
+
Tunables(init_std=1.5, embeddings_std=0.045, embeddings_lr_scale=1, output_mult=1, query_mult=2, rope=True, mask_embs=True, downsample_conv=False, downsample_mean=True, codebook_dim=32, codebook_decay=0.9, lr0=0.0009, clip_gradient_norm=2, weight_decay=0.001, warmup_steps=850, random=False)
+
+
+
+
# convert the final checkpoint
+model = make_model('medium-2d-512c-dim64').load_checkpoint('../vq_stoks-epoch=0-step=9776-val_loss=0.00.ckpt')
+model.save_model(f'vqmodel-medium-en+pl-512c-dim64.model')
+
+
+
# convert the final checkpoint
+model = make_model('base-2d-512c-dim64').load_checkpoint('../vq_stoks--2-24696-acc=0.91.ckpt')
+model.save_model(f'vqmodel-base-en+pl-512c-dim64.model')
+
+
+
# convert the final checkpoint
+model = make_model('medium-2d-1024c-dim64').load_checkpoint('../vq_stoks-chad_gold-3-32813-acc=0.96.ckpt')
+model.save_model(f'vqmodel-medium-v2-en+pl-1024c-dim64.model')
+
+
+
+

Architectural experiments

+
+
# with learned positional embeddings, no out_blocks
+vqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6290 1
+
+
+ +
+
+
'Entropy: 8.71'
+
+
+ + +
+
+
+ +
+ + 0.00% [0/1 00:00<?] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
50016107.56952157.321138.7105:24
10000085.44750101.791718.7010:37
12668881.44776104.250178.7113:27
+

+ +

+ + 62.94% [3959/6290 13:26<07:54 #126688/201280 loss: 81.448 / 104.250] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# with learned positional embeddings, out_blocks before positional
+vqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6290 1
+
+
+ +
+
+
'Entropy: 8.70'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 22:57<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001623.4599142.241138.8005:48
10000016.1968623.678098.7811:27
15001611.9902817.223068.7417:07
20000011.6803716.676058.7022:46
20128011.9263116.652368.7022:57
+

+ +

+ + 100.00% [6290/6290 22:57<00:00 #201280/201280 loss: 11.926 / 16.652] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6290 batches x 32 samples, 1307.9 hours) was reported to be 6290 (when accessing len(dataloader)), but 6291 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# with learned positional embeddings, out_blocks before positional, mlp before vq
+vqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6290 1
+
+
+ +
+
+
'Entropy: 8.57'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 23:09<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001624.6322044.672388.7405:53
10000014.6998319.672988.6711:35
15001611.5077417.752038.5817:16
20000011.3389515.668928.5522:58
20128010.8742215.813628.5723:09
+

+ +

+ + 100.00% [6290/6290 23:08<00:00 #201280/201280 loss: 10.874 / 15.814] +
+ +
+
+
+
+
+

+
+
+
+
+
+
# with learned positional embeddings, out_blocks after positional, mlp before vq
+vqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6290 1
+
+
+ +
+
+
'Entropy: 8.54'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 23:11<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.3789927.549978.6505:53
10000013.1332917.322408.6011:35
15001610.8343513.553718.5617:18
2000009.6949212.358558.5123:00
20128010.5427112.439948.5423:11
+

+ +

+ + 100.00% [6290/6290 23:11<00:00 #201280/201280 loss: 10.543 / 12.440] +
+ +
+
+
+
+
+

+
+
+
+
+
+
# with learned positional embeddings, out_blocks after positional, mlp before vq
+vqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model')
+
+
OneCycle: 6290 5
+
+
+ +
+
+
'Entropy: 8.40'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 1:55:58<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001624.2479047.619608.6205:53
10000014.3598318.501028.5511:35
15001612.3563416.842178.5617:18
20000011.7460316.106038.5223:00
25001610.8532314.830148.4928:56
30000010.7804614.042908.4734:38
35001610.0535412.981338.4040:21
4000009.5963113.780498.5046:03
4500169.2231612.764038.4051:57
5000009.3895811.960848.4657:40
5500168.3603412.598438.351:03:22
6000009.3924211.554118.431:09:05
6500168.3074910.802418.421:15:02
7000008.2043610.398528.481:20:45
7500168.2139210.363678.411:26:27
8000007.7318911.214388.481:32:10
8500167.6485210.938938.471:38:06
9000007.7201010.493918.391:43:48
9500167.589019.859258.421:49:31
10000007.1487110.679878.401:55:14
10064006.7305610.673238.401:55:58
+

+ +

+ + 100.00% [6290/6290 23:12<00:00 #201280/201280 loss: 6.731 / 10.673] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6290 batches x 32 samples, 1307.9 hours) was reported to be 6290 (when accessing len(dataloader)), but 6291 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# with learned positional embeddings, out_blocks after positional, mlp before vq
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=6, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=32, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model')
+
+
OneCycle: 6290 5
+
+
+ +
+
+
'Entropy: 11.07'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 1:57:58<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001615.4971826.4258111.2306:00
10000011.3600614.7807611.2511:48
15001610.2975213.6897411.1917:36
2000009.2201912.1481711.2623:24
2500169.0906713.1692811.1729:26
3000008.5611312.3834211.1335:14
3500168.3096512.0258911.1541:02
4000007.7613510.9790011.1446:50
4500167.3458510.1066711.1152:53
5000007.6525511.0244011.1058:41
5500167.4772610.7361911.101:04:29
6000006.969749.6320611.141:10:17
6500166.933959.9794011.081:16:19
7000006.645078.9194511.131:22:07
7500166.530369.2780011.011:27:55
8000006.504278.3084511.071:33:44
8500166.511139.0950211.121:39:48
9000006.056608.4446110.991:45:36
9500166.209748.8815611.061:51:25
10000005.950458.6992211.081:57:13
10064006.189398.8860411.071:57:58
+

+ +

+ + 100.00% [6290/6290 23:37<00:00 #201280/201280 loss: 6.189 / 8.886] +
+ +
+
+
+
+
+

+
+
+
+
+
+
# base.en Whisper with learned positional embeddings, out_blocks after positional, mlp before vq
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model')
+
+
OneCycle: 6280 5
+
+
+ +
+
+
'Entropy: 10.86'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:05:51<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.1789927.8368111.1109:23
10000013.5065817.3220611.0618:34
15001612.1049115.4941111.0827:47
20000011.8416915.3057010.9536:58
25001611.1951414.0527210.9946:23
30000010.9857813.6923410.8655:34
35001610.5851713.2561010.991:04:46
4000009.8715912.8884410.911:13:57
4500169.7635312.5016110.921:23:22
50000010.0809912.7194010.941:32:33
5500169.8538812.7023210.891:41:45
60000010.5084311.9450510.931:50:57
6500169.2932112.1616610.962:00:20
7000009.2471711.3538710.932:09:32
7500168.8079811.7882110.952:18:43
8000009.1449910.9749610.932:27:55
8500168.7532811.0863210.962:37:21
9000008.4008410.7985110.882:46:33
9500168.7348111.2711610.962:55:45
10000008.5584611.2896710.863:04:57
10048008.0917011.1292410.863:05:51
+

+ +

+ + 100.00% [6280/6280 37:12<00:00 #200960/200960 loss: 8.092 / 11.129] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6280 batches x 32 samples, 1306.1 hours) was reported to be 6280 (when accessing len(dataloader)), but 6281 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset (removed 1st and last segments)
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 10.79'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:09:42<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001619.4405622.6725711.1309:46
10000013.5517814.5844311.2619:23
15001611.9683713.1896811.0929:00
20000011.4387112.4464011.0538:49
25001611.2836011.7008111.1048:26
30000010.8375111.3111011.0958:03
35001610.6931511.1708611.121:07:40
4000009.9877010.9253911.051:17:30
4500169.8317410.6918111.051:27:07
5000009.7723610.4835211.141:36:44
5500169.6663210.3659711.091:46:21
6000009.4093010.0865611.021:56:09
6500169.443579.9248411.042:05:46
7000008.965569.7905411.062:15:23
7500168.836019.6509911.012:25:00
8000008.661079.3914811.122:34:48
8500168.445819.4096911.002:44:26
9000008.564399.2245511.052:54:03
9500168.524899.3035111.033:03:40
9811208.846329.3310810.793:09:42
+

+ +

+ + 100.00% [6132/6132 37:57<00:00 #196224/196224 loss: 8.846 / 9.331] +
+ +
+
+
+
/tmp/ipykernel_90303/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_90303/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=1024, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=12, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 9.36'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:08:14<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001621.6620627.270919.5909:41
10000015.2506616.209159.5319:13
15001613.2184814.255819.5428:45
20000011.8287113.985829.4938:30
25001611.8588413.125969.4248:02
30000011.5410712.601879.4357:34
35001611.4531012.297009.461:07:07
40000011.0820711.984629.381:16:51
45001610.6516011.614829.441:26:24
50000010.6944811.576199.341:35:56
55001610.2576811.150849.381:45:29
6000009.8686010.864309.481:55:14
6500169.9098810.713159.442:04:47
7000009.5323310.520289.422:14:19
7500169.8957810.268279.362:23:52
8000009.1507810.151529.422:33:36
8500169.164819.965549.342:43:09
9000009.145129.905019.402:52:42
9500169.185249.927199.363:02:15
9811208.970339.955179.363:08:14
+

+ +

+ + 100.00% [6132/6132 37:41<00:00 #196224/196224 loss: 8.970 / 9.955] +
+ +
+
+
+
/tmp/ipykernel_90303/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_90303/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=64, q_depth=1, n_head=8, depth=1,
+                                  downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-64c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 5.64'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:09:51<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001676.17780192.671655.8209:48
10000027.8580331.111435.7119:25
15001619.3892022.025955.7529:02
20000016.7552118.756115.6838:51
25001616.2283217.684155.6048:29
30000015.2887116.200285.6858:06
35001614.9166316.245655.631:07:43
40000014.0882415.300975.641:17:32
45001613.5369015.085755.611:27:10
50000013.6255814.453195.651:36:47
55001612.4545013.740455.661:46:25
60000012.2517214.057635.681:56:14
65001612.7619513.717305.692:05:51
70000012.1948313.020705.612:15:28
75001611.8311012.797145.622:25:06
80000012.2367312.707065.732:34:56
85001611.6990112.506065.642:44:34
90000012.0318012.294345.712:54:11
95001612.0652112.229855.673:03:49
98112013.1780212.703895.643:09:51
+

+ +

+ + 100.00% [6132/6132 38:00<00:00 #196224/196224 loss: 13.178 / 12.704] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=512, q_depth=1, n_head=8, depth=1,
+                                  downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=12, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 8.44'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:10:13<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001621.9401827.540108.7009:48
10000015.3026516.387298.7219:26
15001613.5549114.224898.6729:04
20000012.2795813.593888.5338:54
25001611.4839412.794838.5948:33
30000011.4579112.345188.5258:11
35001611.5128811.732548.541:07:49
40000011.0488011.613408.441:17:41
45001610.7407411.151148.511:27:20
50000010.2275911.117608.521:36:59
55001610.2348510.821118.451:46:38
6000009.6260210.529018.481:56:30
6500169.5424710.395918.402:06:08
7000009.2761010.175798.412:15:47
7500169.3984810.030728.462:25:25
8000008.959399.876038.492:35:15
8500169.084469.745718.472:44:54
9000008.761729.791628.432:54:32
9500169.129319.586308.473:04:10
9811209.337009.721778.443:10:13
+

+ +

+ + 100.00% [6132/6132 38:02<00:00 #196224/196224 loss: 9.337 / 9.722] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=64, vq_codes=512, q_depth=1, n_head=8, depth=1,
+                                  downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 8.55'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 38:00<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001624.5413731.364358.5709:47
10000015.9088917.090208.5819:26
15001613.3040513.957598.5129:05
19622414.1989112.887088.5538:00
+

+ +

+ + 100.00% [6132/6132 38:00<00:00 #196224/196224 loss: 14.199 / 12.887] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 11.28'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 37:54<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001617.2641722.2929911.2409:45
10000012.4138114.2285911.2519:22
15001611.1680111.9709611.2129:00
19622410.4981910.5730111.2837:54
+

+ +

+ + 100.00% [6132/6132 37:54<00:00 #196224/196224 loss: 10.498 / 10.573] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 10.75'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:11:21<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.8533422.8969610.8009:51
10000013.8645416.3710110.7319:33
15001612.8560513.5504210.7029:15
20000011.5967612.8799710.7039:09
25001611.1280412.3980910.7648:52
30000011.1046011.6792710.7858:33
35001611.1171911.5558310.771:08:16
40000010.5718311.0755210.691:18:09
45001610.4924310.8282010.791:27:51
50000010.2085310.7779310.811:37:33
55001610.1181210.5480510.731:47:15
6000009.5649310.2206210.771:57:10
6500169.4059410.1921710.682:06:52
7000009.172599.8572610.742:16:34
7500169.182249.7491510.682:26:17
8000008.921059.4710410.702:36:09
8500168.612809.3929010.712:45:51
9000008.434189.3316610.722:55:33
9500168.579119.3382310.713:05:16
9811208.639249.3774910.753:11:21
+

+ +

+ + 100.00% [6132/6132 38:16<00:00 #196224/196224 loss: 8.639 / 9.377] +
+ +
+
+
+
/tmp/ipykernel_100642/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_100642/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+IOPub message rate exceeded.
+The notebook server will temporarily stop sending output
+to the client in order to avoid crashing it.
+To change this limit, set the config variable
+`--NotebookApp.iopub_msg_rate_limit`.
+
+Current values:
+NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)
+NotebookApp.rate_limit_window=3.0 (secs)
+
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset, mean downsampling
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 5
+
+
+ +
+
+
'Entropy: 10.87'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 3:09:50<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001617.4858022.8705110.9309:49
10000013.3008814.6739411.0719:26
15001612.2668312.9975210.9829:04
20000011.5384012.3359910.9638:53
25001610.8699412.0082411.0148:30
30000010.5997611.6365411.0158:08
35001610.7618111.2965910.931:07:45
4000009.9942810.9041210.981:17:35
4500169.7897210.6527410.921:27:13
5000009.7026210.5408010.931:36:50
5500169.8666310.3289610.961:46:28
6000009.4108210.1673410.971:56:16
6500169.544739.9417310.962:05:53
7000009.064069.7194710.932:15:30
7500169.101019.4691910.932:25:08
8000008.605369.4004110.942:34:56
8500168.502169.2399710.892:44:34
9000008.299709.2362610.902:54:11
9500168.521519.2089210.933:03:48
9811208.698049.1472110.873:09:50
+

+ +

+ + 100.00% [6132/6132 37:58<00:00 #196224/196224 loss: 8.698 / 9.147] +
+ +
+
+
+
/tmp/ipykernel_129075/774804256.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_129075/774804256.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset, mean downsampling
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+vqmodel.ensure_whisper()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=16, visual_class=RQVisual)
+
+ +
+
+
'Entropy: 10.91'
+
+
+ + +
+
+
+ +
+ + 0.00% [0/5 00:00<?] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5000815.9357718.2665110.8831:51
7173614.0725215.2231410.9157:51
+

+ +

+ + 35.23% [5124/14546 57:50<1:46:20 #14348/203648 loss: 14.073 / 15.223] +
+ +
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset, mean downsampling
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+vqmodel.ensure_whisper()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=8, visual_class=RQVisual)
+#vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model')
+
+ +
+
+
'Entropy: 10.75'
+
+
+ + +
+
+
+ +
+ + 20.00% [1/5 30:53<2:03:32] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5000817.9925221.1344610.8607:13
10000214.7385115.2607410.7414:30
15001012.6767913.5075710.6122:25
20000411.9863612.6392910.7230:13
24837412.1437812.2616410.7537:45
+

+ +

+ + 22.25% [3236/14546 06:51<23:57 #49675/203648 loss: 12.144 / 12.262] +
+ +
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset, mean downsampling, eqvad
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-eqvad.model')
+
+
OneCycle: 9933 5
+
+
+ +
+
+
'Entropy: 9.83'
+
+
+ + +
+
+
+ +
+ + 100.00% [5/5 5:07:42<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.0645819.4554910.2709:48
10000013.2770513.0607710.3619:27
15001611.9195812.1539510.1729:05
20000011.5940411.6786210.2838:44
25001611.4424211.3251410.1648:22
30000010.8020011.1672110.1758:01
35001610.7853510.9416810.321:07:53
40000010.6627510.9329710.211:17:32
45001611.3286610.8269710.231:27:11
50000010.4000710.8780610.051:36:50
55001610.7483810.6303010.021:46:30
60000010.5756710.585609.971:56:08
65001610.2615910.4414810.192:06:01
70000010.0880310.5137110.122:15:40
75001610.0260010.392789.972:25:19
80000010.2762410.3935010.062:34:58
85001610.1915910.257639.812:44:37
90000010.0817110.2352710.002:54:16
9500169.8833910.253969.923:03:55
10000009.6214610.1180310.063:13:46
10500169.4633410.045619.843:23:25
11000009.5146510.114849.793:33:04
11500169.501319.958289.793:42:43
12000009.531499.943149.893:52:22
12500169.336889.856939.804:02:01
13000009.266279.810149.754:11:53
13500169.371449.766619.774:21:32
14000009.062409.804349.764:31:11
14500169.105739.802849.774:40:50
15000009.011369.717489.744:50:29
15500169.157759.715129.855:00:08
15892809.263629.718029.835:07:42
+

+ +

+ + 100.00% [9933/9933 1:01:29<00:00 #317856/317856 loss: 9.264 / 9.718] +
+ +
+
+
+
/tmp/ipykernel_133489/774804256.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_133489/774804256.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 9933 batches x 32 samples, 1275.0 hours) was reported to be 9933 (when accessing len(dataloader)), but 9934 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+# downsample conv
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 10.70'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 38:13<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.5652721.8622610.7009:50
10000014.1629714.8338110.6619:32
15001611.5799412.2864910.6829:14
19622410.2723910.9685510.7038:13
+

+ +

+ + 100.00% [6132/6132 38:13<00:00 #196224/196224 loss: 10.272 / 10.969] +
+ +
+
+
+
/tmp/ipykernel_100642/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_100642/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.
+  warnings.warn(warn_msg)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=64, vq_codes=4096, q_depth=1, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 10.14'
+
+
+ + +
+
+
+ +
+ + 0.00% [0/1 00:00<?] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001619.8867926.1812010.2109:49
10000014.0491115.8896210.1919:26
10752013.9812515.4147210.1420:55
+

+ +

+ + 54.79% [3360/6132 20:54<17:14 #107520/196224 loss: 13.981 / 15.415] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=2,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 11.10'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 40:03<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001618.6869525.2335811.0610:18
10000013.1734414.2034911.1120:28
15001610.6673611.5164311.0230:39
1962249.6809910.3636311.1040:03
+

+ +

+ + 100.00% [6132/6132 40:03<00:00 #196224/196224 loss: 9.681 / 10.364] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+
+
# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq
+# cleaned dataset
+vqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=64, q_depth=2, n_head=8, depth=1,
+                                  downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name="base.en").cuda()
+train("svq", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,
+      run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)
+#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')
+
+
OneCycle: 6132 1
+
+
+ +
+
+
'Entropy: 5.65'
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 37:35<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvalcodebook entropytime
5001682.99027173.423015.9109:42
10000031.8597236.785155.8119:14
15001623.1668825.483405.7628:46
19622420.6851123.002165.6537:36
+

+ +

+ + 100.00% [6132/6132 37:35<00:00 #196224/196224 loss: 20.685 / 23.002] +
+ +
+
+
+
/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.clear()
+/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.
+  loss_p.set_xlim(10000, self.total_steps)
+
+
+
+
+

+
+
+
+
+ + +
+
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/2c. whisper quantization (semantic token) evaluation.html b/2c. whisper quantization (semantic token) evaluation.html new file mode 100644 index 0000000..c78a860 --- /dev/null +++ b/2c. whisper quantization (semantic token) evaluation.html @@ -0,0 +1,8266 @@ + + + + + + + + + +WhisperSpeech - VQ semantic token extraction evaluation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

VQ semantic token extraction evaluation

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
import io
+import time
+import torch
+import torchaudio
+
+
+
from pathlib import Path
+import json
+from fastprogress import progress_bar, master_bar
+import fastprogress
+import numpy as np
+import pylab as plt
+import pandas as pd
+import random
+import IPython
+
+import whisper
+
+from fastcore.script import *
+from whisperspeech.wer_metrics import *
+
+
+

How Whisper works with speech cut at different lengths

+
+
def test_incremental(model_name, Tmax=15):
+    whmodel = whisper.load_model(model_name)
+    for i in range(Tmax):
+        print(i, whmodel.transcribe(snd[0,:int(i*16000)])['text'])
+
+
+
test_incremental('tiny.en')
+
+
0 
+1  Chapter
+2  Chapter 5 of the
+3  Chapter 5 of the things in our garden.
+4  Chapter 5 of the Things in Our Garden by Arthur Rachael.
+5  Chapter 5 of the things in our garden by Arthur Ransom.
+6  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox Recordings.
+7  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public.
+8  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain.
+9  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5
+10  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5, their own garden.
+11  Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5, Their Own Gardens.
+12  Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra-Vox recording is in the public domain. Chapter 5, Their Own Gardens.
+13  Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra-Vox recording is in the public domain. Chapter 5, their own gardens, close by the wood at the
+14  Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra box recording is in the public domain. Chapter 5, Their Own Gardens, Close by the wood at the bottom of the garden.
+
+
+
+
test_incremental('base.en')
+
+
0 
+1  Chapter 4
+2  Chapter 5 of the
+3  Chapter 5 of the Things in our Guard.
+4  Chapter 5 of The Things in Our Garden by Arthur Raffy
+5  Chapter 5 of The Things in Our Garden by Arthur Ransom.
+6  Chapter 5 of The Things in Our Garden by Arthur Ransom.
+7  CHAPTER V.
+8  CHAPTER V.
+9  CHAPTER V.
+10  CHAPTER V.
+11  CHAPTER V.
+12  CHAPTER V. Their Own Gardens.
+13  CHAPTER V. Their Own Gardens.
+14  CHAPTER V.
+
+
+
+
test_incremental('large-v2')
+
+
0 
+1  Chapter 4.
+2  Chapter 5 of the
+3  Chapter 5 of The Things in Our Garden
+4  V. THE THINGS IN OUR GARDEN
+5  V. THE THINGS IN OUR GARDEN.
+6  CHAPTER V
+7  V. THE THINGS IN OUR GARDEN
+8  CHAPTER V
+9  CHAPTER V
+10  V. THEIR OWN GARDEN
+11  V. THEIR OWN GARDENS
+12  V. THEIR OWN GARDENS
+13  V. THEIR OWN GARDENS CLOSE BY THE WOOD
+14  V. THEIR OWN GARDENS CLOSE BY THE WOOD AT THE BOTTOM OF THE GARDEN
+
+
+
+
+

Tests on LibriSpeech

+
+
def make_test_ds(): return progress_bar(librispeech_data('/data/LibriSpeech/test-clean'), total=1000)
+
+
+

Entropy of the token stream

+
+
from whisperspeech.vq_stoks import RQBottleneckTransformer
+
+
+
import collections
+def calc_model_entropy(ds, modelfile):
+    vqmodel = RQBottleneckTransformer.load_model(local_filename=modelfile).cuda()
+    cnts = collections.Counter()
+    for snd,txt in ds:
+        stoks = vqmodel.encode_audio(snd.cuda())
+        cnts.update(stoks[0].tolist())
+    pdf = torch.tensor([cnts[i] for i in range(max(cnts)+1)])
+    pdf = pdf / pdf.sum()
+    return -torch.nansum(pdf * np.log2(pdf))
+
+
+
# the original semantic token model from early 2023
+calc_model_entropy(make_test_ds(), None)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
6.097853445304322
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-ce9.2.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
6.357563112144668
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-256c.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
3.0997004132066834
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-256c-cosine.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
5.6921860685011225
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-2d-256c.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
2.899952018598168
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-2d-256c-cosine.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
5.769594466589709
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-2d-256c-cosine-padfix2.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
7.741530540488036
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
8.164144580014993
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-2d-512c-cosine32-padfix-premlp-learnpos-5e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:04<00:00] +
+ +
+
+
11.37221612373814
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:08<00:00] +
+ +
+
+
11.240560444030649
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:17<00:00] +
+ +
+
+
/tmp/ipykernel_276/107266959.py:11: RuntimeWarning: divide by zero encountered in log2
+  return -torch.nansum(pdf * np.log2(pdf))
+
+
+
tensor(9.6971)
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:06<00:00] +
+ +
+
+
/tmp/ipykernel_276/107266959.py:11: RuntimeWarning: divide by zero encountered in log2
+  return -torch.nansum(pdf * np.log2(pdf))
+
+
+
tensor(11.4108)
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:17<00:00] +
+ +
+
+
/tmp/ipykernel_103351/107266959.py:11: RuntimeWarning: divide by zero encountered in log2
+  return -torch.nansum(pdf * np.log2(pdf))
+
+
+
tensor(9.9410)
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:17<00:00] +
+ +
+
+
/tmp/ipykernel_9385/107266959.py:11: RuntimeWarning: divide by zero encountered in log2
+  return -torch.nansum(pdf * np.log2(pdf))
+
+
+
tensor(11.2880)
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-60k.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:20<00:00] +
+ +
+
+
tensor(11.4831)
+
+
+
+
calc_model_entropy(make_test_ds(), "vq-base.en-2d-4096c-60k.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:20<00:00] +
+ +
+
+
tensor(11.4831)
+
+
+
+
# 4096 tokens, we later found out that tokens from this model do carry speaker information
+calc_model_entropy(make_test_ds(), "vqmodel-4e-hyptuned-32gpu.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:05<00:00] +
+ +
+
+
tensor(11.6404)
+
+
+
+
calc_model_entropy(make_test_ds(), "vqmodel-256c-4e-hyptuned-32gpu.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:05<00:00] +
+ +
+
+
tensor(8.7963)
+
+
+
+
calc_model_entropy(make_test_ds(), "vqmodel-256c-dim64-4e-hyptuned-32gpu.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:14<00:00] +
+ +
+
+
tensor(8.7499)
+
+
+
+
calc_model_entropy(make_test_ds(), "vqmodel-base-en+pl-512c-dim64.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:14<00:00] +
+ +
+
+
tensor(8.3956)
+
+
+
+
# the final model
+calc_model_entropy(make_test_ds(), "vqmodel-medium-en+pl-512c-dim64.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 00:45<00:00] +
+ +
+
+
tensor(8.4314)
+
+
+
+
+

Word Error Rate measurements

+
+
from whisperspeech.wer_metrics import *
+
+
+

Vanilla Whisper models

+
+
def test_wh_model(whmodel):
+    # Benchmark a vanilla Whisper model on LibriSpeech test-clean (1000 samples):
+    # greedy-decode each utterance, accumulate WER stats, and report mean WER
+    # both with and without samples flagged as hallucinations.
+    decoding_options=whisper.DecodingOptions(language='en')
+    stats = WERStats()
+    for snd, gt_text in progress_bar(librispeech_data('/data/LibriSpeech/test-clean'), total=1000):
+        text = whmodel.decode(whisper.log_mel_spectrogram(whisper.pad_or_trim(snd[0])).cuda(), decoding_options).text
+        diff = stats.push_sample(snd, gt_text, text)
+        # Heuristic hallucination flag: the alignment ends with an insertion of
+        # more than 3 hypothesis words (text invented past the reference).
+        last_diff = diff.alignments[0][-1]
+        stats.push(hallucination = last_diff.type == 'insert' and last_diff.hyp_end_idx - last_diff.hyp_start_idx > 3)
+    stats = stats.df().sort_values('wer')
+    print(f"WER: {stats.wer.mean()*100:.2f}%")
+    print(f"WER (w/o hallucinations): {stats[~stats['hallucination']].wer.mean()*100:.2f}%")
+    return stats
+
+
+
test_wh_model(whisper.load_model('tiny.en'))
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:05<00:00] +
+ +
+
+
WER: 6.91%
+WER (w/o hallucinations): 6.91%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
3552.885NoneI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZI'm afraid I don't know much about the land of...0.0000000.0000000.0000001.000000False
3535.870NoneTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...The first lot we tested on our glass cat, whic...0.0000000.0000000.0000001.000000False
6742.295NoneHE ONLY SHOOK HIS HEADHe only shook his head.0.0000000.0000000.0000001.000000False
67511.545NoneWELL BUT NOW SAID THE PRINCESS AND SHE FILLED ...Well, but now said the princess, and she fille...0.0000000.0000000.0000001.000000False
..............................
5243.195NoneBROTHER MAC ARDLE BROTHER KEOGHBrother Maccardo, Brother Keoff.0.6000000.6000000.8000000.200000False
5921.805NoneHANS STIRS NOTHans Stairz-Nied.0.6666670.6666670.8888890.111111False
8202.155NoneTHE FORMER BOOLOOROO GROANEDThe former Billie Rook-Round0.7500000.6000000.8000000.200000False
9183.000NoneTHAT IS TRUE BADAUDERIEThat is true bad dealt gree.0.7500000.5000000.6250000.375000False
3712.440NoneCONSEIL WAS MY MANSERVANTCause A was my man's servant.1.2500000.7142860.8571430.142857False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_wh_model(whisper.load_model('base.en'))
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:41<00:00] +
+ +
+
+
WER: 5.08%
+WER (w/o hallucinations): 5.08%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_texttextwerhallucination
08.230AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said while on her lap ...0.000000False
4035.370DEPARTING FROM FIVE HUNDRED THOUSAND THROATS T...Departing from 500,000 throats, three cheers b...0.000000False
40413.140THOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE T...Thousands of handkerchiefs were waving above t...0.000000False
4052.695IT'S ALMOST BEYOND CONJECTUREIt's almost beyond conjecture.0.000000False
4067.805THIS REALITY BEGINS TO EXPLAIN THE DARK POWER ...This reality begins to explain the dark power ...0.000000False
..................
5243.195BROTHER MAC ARDLE BROTHER KEOGHBrother McCartill, Brother Kiaff.0.600000False
5921.805HANS STIRS NOTHans Sturznide.0.666667False
9183.000THAT IS TRUE BADAUDERIEThat is true, bad girl degree.0.750000False
3712.440CONSEIL WAS MY MANSERVANTCas← was my man's servant.1.000000False
5382.215STEPHANOS DEDALOSStefano Staedt-Loss1.500000False
+ +

1000 rows × 5 columns

+
+
+
+
+
+
test_wh_model(whisper.load_model('small.en'))
+
+ + +
+
+ +
+ + 100.00% [1000/1000 02:53<00:00] +
+ +
+
+
WER: 3.89%
+WER (w/o hallucinations): 3.84%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
7895.945NoneAND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTR...and this plan was adopted too in order to extr...0.0000000.0000000.0000001.000000False
46110.980NoneSHE MEANWHILE PASSED HER LIFE WITH HER PARENTS...She, meanwhile, passed her life with her paren...0.0000000.0000000.0000001.000000False
4648.845NoneONE DAY WHEN THE BOY WAS SENT BY HIS GRANDFATH...One day when the boy was sent by his grandfath...0.0000000.0000000.0000001.000000False
4658.785NoneTHE BED SHE TOO WELL REMEMBERED WAS THERE AND ...The bed she too well remembered was there, and...0.0000000.0000000.0000001.000000False
..............................
5243.195NoneBROTHER MAC ARDLE BROTHER KEOGHBrother McCardle. Brother Kiyof.0.6000000.6000000.8000000.200000False
2881.905NoneI DELIGHT IN YOUR KITCHENby delighting your kitchen.0.6000000.6000000.8000000.200000False
12115.270NoneAT LAST THE LITTLE MICE STAYED AWAY ALSO AND T...At last the little mice stayed away also, and ...0.6363640.6363640.6363640.363636False
9183.000NoneTHAT IS TRUE BADAUDERIEThat is true Bad Delt Grey.0.7500000.5000000.6250000.375000False
5382.215NoneSTEPHANOS DEDALOSStefano Staedtlos1.0000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_wh_model(whisper.load_model('medium.en'))
+
+ + +
+
+ +
+ + 100.00% [1000/1000 06:22<00:00] +
+ +
+
+
WER: 4.19%
+WER (w/o hallucinations): 3.19%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_texttextwerhallucination
3865.915YES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...Yes, we are, certainly, I replied evasively, b...0.00False
5076.480HIS CONDUCT AND PRESENCE OF MIND IN THIS EMERG...His conduct and presence of mind in this emerg...0.00False
8654.315THEIR SUFFERINGS HAVE NEVER YET BEEN FITLY CHR...Their sufferings have never yet been fitly chr...0.00False
50913.610FROM THE SAME MEN NEW REGIMENTS AND NEW COMPAN...From the same men new regiments and new compan...0.00False
51112.655THOUGH THE DISCIPLINE OF THE FORMER PARLIAMENT...Though the discipline of the former parliament...0.00False
..................
7822.260TO DAY I SHOUTEDToday, I shouted.0.50False
5243.195BROTHER MAC ARDLE BROTHER KEOGHBrother McCardle, Brother Kiyof.0.60False
9183.000THAT IS TRUE BADAUDERIEThat is true bad health grief.0.75False
5382.215STEPHANOS DEDALOSStefanos Daedalus1.00False
2266.750HE CONTINUED HIS PRETENDED SEARCH AND TO GIVE ...He continued his pretended search, and to give...9.80True
+ +

1000 rows × 5 columns

+
+
+
+
+
+
test_wh_model(whisper.load_model('large-v2'))
+
+ + +
+
+ +
+ + 100.00% [1000/1000 07:39<00:00] +
+ +
+
+
WER: 6.07%
+WER (w/o hallucinations): 3.19%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said while on her lap ...0.0000000.0000000.0000001.000000False
6062.610NoneWE SUFFER STIFLING PAINSWe suffer stifling pains.0.0000000.0000000.0000001.000000False
6077.040NoneSATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL...Saturday, August 15th. The sea unbroken all ro...0.0000000.0000000.0000001.000000False
6083.070NoneTHE HORIZON SEEMS EXTREMELY DISTANTThe horizon seems extremely distant.0.0000000.0000000.0000001.000000False
6099.985NoneALL MY DANGER AND SUFFERINGS WERE NEEDED TO ST...All my danger and sufferings were needed to st...0.0000000.0000000.0000001.000000False
..............................
5921.805NoneHANS STIRS NOTHans Sturznott0.6666670.6666670.8333330.166667False
958.800NoneTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...thought the fir tree, and believed it all, bec...4.2857140.8108110.8108110.189189True
9027.370NoneI HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...I had a name, I believe, in my young days, but...7.4761900.8820220.8820220.117978True
6107.370NoneYOU SEEM ANXIOUS MY UNCLE I SAID SEEING HIM CO..."'You seem anxious, my uncle,' I said, seeing ...7.8235290.8866670.8866670.113333True
4386.665NoneAS TO HIS AGE AND ALSO THE NAME OF HIS MASTER ...As to his age, and also the name of his master...8.6315790.8961750.8961750.103825True
+ +

1000 rows × 9 columns

+
+
+
+
+
+
+

Quantized Whisper models

+
+
def test_model(modelfile, N=1000):
+    # Benchmark a quantized (VQ bottleneck) model on LibriSpeech test-clean:
+    # encode audio to semantic tokens, decode the tokens back to text, and
+    # report mean WER with and without hallucinated samples.
+    #   modelfile: VQ checkpoint path (None loads the default model)
+    #   N: number of samples the progress bar expects
+    vqmodel = RQBottleneckTransformer.load_model(local_filename=modelfile).cuda()
+    stats = WERStats()
+    for snd, gt_text in progress_bar(librispeech_data('/data/LibriSpeech/test-clean'), total=N):
+        stoks = vqmodel.encode_audio(snd.cuda())
+        text = vqmodel.decode_text(stoks[0])[0].text
+        diff = stats.push_sample(snd, gt_text, text)
+        # Same hallucination heuristic as test_wh_model: a trailing insertion
+        # of more than 3 hypothesis words.
+        last_diff = diff.alignments[0][-1]
+        stats.push(hallucination = last_diff.type == 'insert' and last_diff.hyp_end_idx - last_diff.hyp_start_idx > 3)
+    stats = stats.df().sort_values('wer')
+    print(f"WER: {stats.wer.mean()*100:.2f}%")
+    print(f"WER (w/o hallucinations): {stats[~stats['hallucination']].wer.mean()*100:.2f}%")
+    return stats
+
+
+
test_model(None) # the old stoks model from early 2023
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:10<00:00] +
+ +
+
+
WER: 16.06%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
2074.075SEVERAL HUNDRED FREE STATE MEN PROMPTLY RESPON...several hundred free state men promptly respon...0.000000
2095.295THE LEADERS OF THE CONSPIRACY BECAME DISTRUSTF...The leaders of the conspiracy became distrustf...0.000000
7092.440THE THREE MODES OF MANAGEMENTThe three modes of management.0.000000
70813.020THE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY...The pain produced by an act of hasty and angry...0.000000
7055.250THEY ARE CHIEFLY FORMED FROM COMBINATIONS OF T...They are chiefly formed from combinations of t...0.000000
...............
3712.440CONSEIL WAS MY MANSERVANTCOSA was my man's servant.1.000000
1444.680AND BESIDES SUPPOSE THEE DOES LEARN MEDICINEand be sides, supposed to be lost, Lord medicine.1.000000
9074.195MADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIESMadam Gwen-Saun, besides Ken Sir Ian Corrie's.1.142857
1872.230NO ITS NOT TOO SOONKnow what's sought to assume.1.200000
5382.215STEPHANOS DEDALOSStephano's Nerdos.1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model('vq-ce9.2.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:12<00:00] +
+ +
+
+
WER: 8.80%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
08.230AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.000000
2831.420DIRECTIONdirection.0.000000
2822.385I DIDN'T PREACH WITHOUT DIRECTIONI didn't preach without direction.0.000000
6243.975I SHUDDER AS I RECALL THESE MONSTERS TO MY REM...I shudder as I recall these monsters to my rem...0.000000
27910.490WE CAN ALL BE SERVANTS OF GOD WHEREVER OUR LOT...We can all be servants of God, wherever our lo...0.000000
...............
8202.155THE FORMER BOOLOOROO GROANEDthe former Boula Rook round.0.750000
9183.000THAT IS TRUE BADAUDERIEThat is true, bad, old-gree.0.750000
1056.555IF IT ONLY WERE NOT SO DARK HERE AND SO TERRIB...If... ... ... ... ... ... ... ... ... ... ... ...0.916667
3712.440CONSEIL WAS MY MANSERVANTJose was my man's servant.1.000000
5382.215STEPHANOS DEDALOSStefanos de los1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model('vq-256c.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:13<00:00] +
+ +
+
+
WER: 10.26%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
7895.945AND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTR...And this plan was adopted too, in order to ext...0.000
3655.780I WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...I will show you what a good job I did. And she...0.000
72210.720AS I SPOKE I MADE HIM A GRACIOUS BOW AND I THI...As I spoke, I made him a gracious bow, and I t...0.000
7237.840I HAVE COME TO YOUR SHORES MISTER PRESIDENT WI...I have come to your shores, Mr. President, wit...0.000
3625.335SOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...Sometimes it is called a crazy quilt because t...0.000
...............
1062.020SQUEAK SQUEAKSquick. Squick.1.000
5382.215STEPHANOS DEDALOSStephanos de Arlos.1.000
2881.905I DELIGHT IN YOUR KITCHENI'd like to introduce you in your kitchen.1.000
3712.440CONSEIL WAS MY MANSERVANTCall say, was my man servant?1.000
3814.880CONSEIL I CALLED A THIRD TIME CONSEIL APPEAREDCan't say, at call the third time. Can't say a...1.125
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model('vq-256c-cosine.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:10<00:00] +
+ +
+
+
WER: 10.24%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
71011.490TO SUPPOSE THAT THE OBJECT OF THIS WORK IS TO ...To suppose that the object of this work is to ...0.000000
6293.235TWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE METwo hours afterwards, a terrible shock awoke me.0.000000
6401.740POOR ALICEPoor Alice.0.000000
2622.435THAT'S WHAT YOU'D LIKE TO BE DOING IS ITThat's what you'd like to be doing, is it?0.000000
6443.105AND YESTERDAY THINGS WENT ON JUST AS USUALAnd yesterday, things went on just as usual.0.000000
...............
1872.230NO ITS NOT TOO SOONNo, it's not just here.0.800000
1154.470WHO IS HUMPY DUMPY ASKED THE MICEWho is a MP? Don't be. Ask the mice.0.857143
3712.440CONSEIL WAS MY MANSERVANTCross say, was my man servant.1.000000
1062.020SQUEAK SQUEAKquick, quick.1.000000
5382.215STEPHANOS DEDALOSStephenos der los.1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model('vq-2d-256c.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:11<00:00] +
+ +
+
+
WER: 21.75%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
7092.440THE THREE MODES OF MANAGEMENTThe Three Modes of Management0.000000
4192.415FATHOM SIX FEETFathom six feet.0.000000
7034.775NATURE OF THE EFFECT PRODUCED BY EARLY IMPRESS...nature of the effect produced by early impress...0.000000
6932.110I AM VERY GLADI am very glad.0.000000
6862.740NO MY LITTLE SON SHE SAIDNo, my little son, she said.0.000000
...............
6273.060TUESDAY AUGUST EIGHTEENTH2. Day August 8th1.000000
8202.155THE FORMER BOOLOOROO GROANEDThe former Bill of Rook around.1.000000
285.530KESWICK MARCH TWENTY SECOND EIGHTEEN THIRTY SE...Yes, we wish between second 1837. Did you reme...1.333333
1062.020SQUEAK SQUEAKQuick, quick, quick.1.500000
7921.810VENICEThen Next2.000000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model('vq-2d-256c-cosine.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:11<00:00] +
+ +
+
+
WER: 11.61%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
6862.740NO MY LITTLE SON SHE SAIDNo, my little son, she said.0.000000
9027.370I HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...I had a name I believe in my young days, but I...0.000000
9043.300YOU DO ME A GREAT HONOURYou do me a great honor.0.000000
2286.775AS HE HAD PROMISED TO PROTECT THE HOTEL THE RE...As he had promised to protect the hotel, the r...0.000000
5213.440SOON THE WHOLE BRIDGE WAS TREMBLING AND RESOUN...Soon the whole bridge was trembling and resoun...0.000000
...............
9183.000THAT IS TRUE BADAUDERIEThat is true, bad, old-gree.0.750000
3814.880CONSEIL I CALLED A THIRD TIME CONSEIL APPEAREDConse, at call to third town. Conse, appeared.0.750000
1154.470WHO IS HUMPY DUMPY ASKED THE MICEWho eats umpi, don't pee? Ask the mice.0.857143
5382.215STEPHANOS DEDALOSStephenau Stairlauce.1.000000
1062.020SQUEAK SQUEAKSpeak. Speak. Speak.1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# full crop
+test_model('vq-2d-256c-cosine-padfix2.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:10<00:00] +
+ +
+
+
WER: 16.13%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
6523.475I AM SO VERY TIRED OF BEING ALL ALONE HEREI'm so very tired of being all alone here.0.000000
9062.610AT YOUR SERVICE SIRAt your service, sir.0.000000
9043.300YOU DO ME A GREAT HONOURYou do me a great honor.0.000000
9027.370I HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...I had a name I believe in my young days, but I...0.000000
9012.755I NEVER HAD ANY FAMILYI never had any family.0.000000
...............
4482.215WHO TOUCHES ME AM I IN BEDLook at us, me, our young dad.1.000000
9344.205I RESIDE IN THE MARAIS RUE DE DOUZE PORTESIrae's eye in the Ma'rae's crew did to support.1.111111
5382.215STEPHANOS DEDALOSStep 4, Zetelos.1.500000
161.695FAREWELL MADAMFair will, damn.1.500000
3712.440CONSEIL WAS MY MANSERVANTCos they were my man's servant.1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# no cropping
+test_model('vq-2d-256c-cosine-padfix2.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:10<00:00] +
+ +
+
+
WER: 11.17%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
8392.275THE CAPTAIN SHOOK HIS HEADThe captain shook his head.0.000000
4089.935NEMO BUILDS A FABULOUS FUTURISTIC SUBMARINE TH...Nemo builds a fabulous futuristic submarine, t...0.000000
4052.695IT'S ALMOST BEYOND CONJECTUREIt's almost beyond conjecture.0.000000
40413.140THOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE T...Thousands of handkerchiefs were waving above t...0.000000
79014.900BRIGHTER THAN EARLY DAWN'S MOST BRILLIANT DYE ...Brighter than early dawn's most brilliant dye ...0.000000
...............
5382.215STEPHANOS DEDALOSStephenos dellos1.000000
5921.805HANS STIRS NOTHonsters nod.1.000000
9074.195MADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIESMadam Quinsong, besides Cinanza, you're in que...1.000000
1154.470WHO IS HUMPY DUMPY ASKED THE MICEPhew, he's on P, don't pee. Ask the mice.1.142857
3712.440CONSEIL WAS MY MANSERVANTCos they were my man's servant.1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# crop to 200 toks minimum
+test_model('vq-2d-256c-cosine-padfix2.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:10<00:00] +
+ +
+
+
WER: 12.56%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
8712.920WHO BEGAN THE QUARREL WAS IT THE MORMONSWho began the quarrel? Was it the Mormons?0.000000
93812.435HOW STRANGE IT SEEMED TO THE SAD WOMAN AS SHE ...How strange it seemed to the sad woman, as she...0.000000
93712.605HIS HOUSEKEEPER HAD THE MANAGEMENT OF EVERYTHI...His housekeeper had the management of everythi...0.000000
55815.720IT WAS STRANGE TOO THAT HE FOUND AN ARID PLEAS...It was strange too, that he found an arid plea...0.000000
3053.835THE HEAD OF THE PATCHWORK GIRL WAS THE MOST CU...The head of the patchwork girl was the most cu...0.000000
...............
5382.215STEPHANOS DEDALOSStephenos dellos1.000000
9074.195MADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIESMadam Quinsong, besides Cenanza, you're in que...1.000000
1062.020SQUEAK SQUEAKQuick, quick.1.000000
1154.470WHO IS HUMPY DUMPY ASKED THE MICEP-E-S-A-P, don't be... asked the mice.1.142857
3712.440CONSEIL WAS MY MANSERVANTCos-A was my man's servant.1.250000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# crop to audio length
+test_model('vq-2d-512c-cosine-padfix-premlp-learnpos.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:09<00:00] +
+ +
+
+
WER: 9.89%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
5702.715BEWARE OF MAKING THAT MISTAKEBeware of making that mistake.0.000000
2603.155WHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE ...Who taught you to scrub a floor? I should like...0.000000
8005.770OLD DANCES ARE SIMPLIFIED OF THEIR YEARNING BL...Old dances are simplified of their yearning, b...0.000000
2582.260SPINNING INDEEDSpinning Indeed.0.000000
6533.815AND I DECLARE IT'S TOO BAD THAT IT ISAnd I declare it's too bad that it is.0.000000
...............
9344.205I RESIDE IN THE MARAIS RUE DE DOUZE PORTESIries I'd in the Marfra Grudetus port.0.777778
1154.470WHO IS HUMPY DUMPY ASKED THE MICEWho is a P-Don't Be? Ask the mice.0.857143
4482.215WHO TOUCHES ME AM I IN BEDPotatys me, and my embed.0.857143
5921.805HANS STIRS NOTHan Stersnide1.000000
5382.215STEPHANOS DEDALOSStefanos de los1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# crop to audio length
+test_model('vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:09<00:00] +
+ +
+
+
WER: 9.51%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_textstextswers
08.230AND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.000
6077.040SATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL...Saturday, August 15th. The sea unbroken all ro...0.000
6083.070THE HORIZON SEEMS EXTREMELY DISTANTThe horizon seems extremely distant.0.000
6153.735THEREFORE DON'T TALK TO ME ABOUT VIEWS AND PRO...Therefore, don't talk to me about views and pr...0.000
6165.795I TAKE THIS AS MY ANSWER AND I LEAVE THE PROFE...I take this as my answer and I leave the profe...0.000
...............
1573.830AND THEE WON'T GO WHY SHOULD IAnd, see you all next time!0.875
3814.880CONSEIL I CALLED A THIRD TIME CONSEIL APPEAREDCan say, at call the third time, can say appea...0.875
3712.440CONSEIL WAS MY MANSERVANTCaus← was my man's servant.1.000
5382.215STEPHANOS DEDALOSStefanos dellos.1.000
1062.020SQUEAK SQUEAKSweet, sweet.1.000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
# crop to audio length
+test_model('vq-2d-512c-cosine32-padfix-premlp-learnpos-5e.model')
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:08<00:00] +
+ +
+
+
WER: 9.84%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_texttextwer
3104.040SHE POURED INTO THE DISH A QUANTITY FROM EACH ...She poured into the dish a quantity from each ...0.0
3872.735A ROUTE SLIGHTLY LESS DIRECT THAT'S ALLa route slightly less direct, that's all.0.0
3854.530ANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WH...Anyhow, we'll leave instructions to ship the w...0.0
7424.730WE SAT WITH THE OFFICERS SOME LITTLE TIME AFTE...We sat with the officers some little time afte...0.0
3839.300PACK AS MUCH INTO MY TRUNK AS YOU CAN MY TRAVE...Pack as much into my trunk as you can. My trav...0.0
...............
55913.895THE SENTENCE OF SAINT JAMES WHICH SAYS THAT HE...Thank you.1.0
7755.545THE PECULIAR CIRCUMSTANCES OF THE COLONY ARE W...Thank you.1.0
1062.020SQUEAK SQUEAKQuick, quick.1.0
5382.215STEPHANOS DEDALOSStephanos de los1.0
4914.805THE PARLIAMENT AND THE SCOTS LAID THEIR PROPOS...Thank you.1.0
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:41<00:00] +
+ +
+
+
WER: 7.51%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsgt_texttextwer
8626.720TO THE FERVENT LATTER DAY SAINT A TEMPLE IS NO...To the fervent Latter-day Saint, a temple is n...0.000000
4366.380SHE WAS A LARGE HOMELY WOMAN THEY WERE COMMON ...She was a large, homely woman. They were commo...0.000000
4375.425SUBSTANTIALLY THIS WAS JACOB'S UNVARNISHED DES...Substantially, this was Jacob's unvarnished de...0.000000
4386.665AS TO HIS AGE AND ALSO THE NAME OF HIS MASTER ...As to his age and also the name of his master,...0.000000
4393.020OF STARTING I DIDN'T KNOW THE WAY TO COMEof starting. I didn't know the way to come.0.000000
...............
48012.510THIS WAS DONE FOR THE EVENT TOOK PLACE AT A TI...This was done for the event took place.0.783784
71317.945THE MOTHER AS SOON AS THE CHAISE IS SO FAR TUR...The Mother. As soon as the chase0.869565
4547.720AMONG OTHER THINGS ON WHICH SHE CAST HER EYES ...Among other things...0.869565
3712.440CONSEIL WAS MY MANSERVANTCossay was my man's servant.1.000000
5382.215STEPHANOS DEDALOSStefano Staedt-Los1.500000
+ +

1000 rows × 4 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:33<00:00] +
+ +
+
+
WER: 7.49%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
7148.010NoneSO YOU WILL BE A GOOD GIRL I KNOW AND NOT MAKE...So you will be a good girl, I know, and not ma...0.0000000.0000000.0000001.000000
3655.780NoneI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...I will show you what a good job I did, and she...0.0000000.0000000.0000001.000000
6083.070NoneTHE HORIZON SEEMS EXTREMELY DISTANTThe horizon seems extremely distant.0.0000000.0000000.0000001.000000
3625.335NoneSOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...Sometimes it is called a crazy quilt because t...0.0000000.0000000.0000001.000000
3616.045NoneA BED QUILT MADE OF PATCHES OF DIFFERENT KINDS...A bed quilt made of patches of different kinds...0.0000000.0000000.0000001.000000
...........................
48012.510NoneTHIS WAS DONE FOR THE EVENT TOOK PLACE AT A TI...This was done for the event took place.0.7837840.7837840.7837840.216216
4547.720NoneAMONG OTHER THINGS ON WHICH SHE CAST HER EYES ...Among other things...0.8695650.8695650.8695650.130435
71317.945NoneTHE MOTHER AS SOON AS THE CHAISE IS SO FAR TUR...The Mother. As soon as the chase0.8695650.8695650.8881990.111801
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Los1.5000001.0000001.0000000.000000
+ +

1000 rows × 8 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:31<00:00] +
+ +
+
+
WER: 10.44%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
6698.540NoneAT THE FARTHER END OF THE LARGEST HALL A TABLE...At the farther end of the largest hall, a tabl...0.00.00.01.0
3492.130NoneTHE WOMAN SEEMED THOUGHTFULThe woman seemed thoughtful.0.00.00.01.0
5724.090NoneHE IS CALLED AS YOU KNOW THE APOSTLE OF THE IN...He is called, as you know, the apostle of the ...0.00.00.01.0
3473.665NoneOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...Ojo had never eaten such a fine meal in all hi...0.00.00.01.0
3463.705NoneAND YOU MUST BE OJO THE UNLUCKY SHE ADDEDAnd you must be Ojo the unlucky," she added.0.00.00.01.0
...........................
89618.540NoneSILVIA WAS THE ADORATION OF FRANCE AND HER TAL...Sylvia1.01.01.00.0
6895.995NoneDELLA HAD A YOUNG SISTER NAMED MARIA AND A COU...Dela.1.01.01.00.0
51227.525NoneVALOR INDEED WAS VERY GENERALLY DIFFUSED OVER ...Vala.1.01.01.00.0
89723.740NoneSILVIA DID NOT THINK THAT HER GOOD CONDUCT WAS...Sylvia.1.01.01.00.0
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.51.01.00.0
+ +

1000 rows × 8 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:45<00:00] +
+ +
+
+
WER: 6.64%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000
3773.910NoneHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...He went here, there, and everywhere in perfect...0.0000000.0000000.0000001.000000
3768.340NoneNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...Never did he object to buckling up his suitcas...0.0000000.0000000.0000001.000000
6878.500NoneIF YOU DRESSED IN SILK AND GOLD FROM TOP TO TO...If you dressed in silk and gold from top to to...0.0000000.0000000.0000001.000000
68811.125NoneTO SUCH PERSONS THESE INDIRECT MODES OF TRAINI...To such persons, these indirect modes of train...0.0000000.0000000.0000001.000000
...........................
9183.000NoneTHAT IS TRUE BADAUDERIEThat is true bad-dulch-gree.0.7500000.5000000.6250000.375000
4625.640NoneA GOOD NEIGHBOUR OF THE BRONTES A CLEVER INTEL...A good neighbor of the Bronte's, a clever, int...0.7971010.7857140.8369570.163043
22115.060NoneIN THE SHOOTING OF SHERIFF JONES IN LAWRENCE A...In the shooting of Sheriff's0.8571430.8571430.8809520.119048
87917.840NoneTHEY KNEW NO NORTH NO SOUTH NO EAST NO WEST TH...They knew no North.0.9259260.9259260.9259260.074074
5382.215NoneSTEPHANOS DEDALOSStefano Staedalus.1.0000001.0000001.0000000.000000
+ +

1000 rows × 8 columns

+
+
+
+
+
+
_9.plot.scatter('secs', 'wer', alpha=.2)
+
+
+
+

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:36<00:00] +
+ +
+
+
WER: 6.34%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000
69618.415NoneFOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLA...For instance, one day the children had been pl...0.0000000.0000000.0000001.000000
3702.340NoneBUT NOW NOTHING COULD HOLD ME BACKBut now nothing could hold me back.0.0000000.0000000.0000001.000000
3699.340NoneI WANTED NOTHING MORE THAN TO SEE MY COUNTRY A...I wanted nothing more than to see my country a...0.0000000.0000000.0000001.000000
3686.190NoneEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...Even so, I had just returned from an arduous j...0.0000000.0000000.0000001.000000
...........................
8202.155NoneTHE FORMER BOOLOOROO GROANEDthe former Boula-Ri-Growned.0.7500000.6000000.8000000.200000
8432.110NoneFINE GLORIOUSFind. Chlorious.1.0000001.0000001.0000000.000000
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667
5921.805NoneHANS STIRS NOTHon Stir's Night.1.3333331.0000001.0000000.000000
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.5000001.0000001.0000000.000000
+ +

1000 rows × 8 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:30<00:00] +
+ +
+
+
WER: 10.00%
+WER (w/o hallucinations): 10.00%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
69618.415NoneFOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLA...For instance, one day the children had been pl...0.0000000.0000000.0000001.000000False
5944.865NoneI REFER TO THE THERMOMETER IT INDICATES THE FI...I refer to the thermometer, it indicates the f...0.0000000.0000000.0000001.000000False
7388.105NoneTHEN THERE WERE THREE OR FOUR LEADING MEN OF T...Then there were three or four leading men of t...0.0000000.0000000.0000001.000000False
3552.885NoneI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZI'm afraid I don't know much about the land of...0.0000000.0000000.0000001.000000False
3549.840NoneI THINK THE NEXT GLASS CAT THE MAGICIAN MAKES ...I think the next glass cat the magician makes ...0.0000000.0000000.0000001.000000False
..............................
88622.095NoneTHIS MEANT THAT FOR AN ALLEGED MISDEMEANOR FOR...This is the end of the video.0.9491530.9491530.9782080.021792False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667False
852.610NoneTHIS EVENING THEY ALL SAIDThis is the end of the video.1.2000000.8571430.9714290.028571False
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.5000001.0000001.0000000.000000False
41817.640NoneFOR MANY THEN THIS BOOK HAS BEEN A SOURCE OF F...For many then, this is the end of the video. F...2.9230770.9421490.9896160.010384False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:28<00:00] +
+ +
+
+
WER: 7.82%
+WER (w/o hallucinations): 7.82%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
7092.440NoneTHE THREE MODES OF MANAGEMENTthe three modes of management.0.0000000.0000000.0000001.000000False
7402.715NoneBUT I MEAN TO HAVE MY INNINGS BEFORE LONGBut I mean to have my innings before long.0.0000000.0000000.0000001.000000False
3625.335NoneSOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...Sometimes it is called a crazy quilt because t...0.0000000.0000000.0000001.000000False
3616.045NoneA BED QUILT MADE OF PATCHES OF DIFFERENT KINDS...A bed quilt made of patches of different kinds...0.0000000.0000000.0000001.000000False
6056.305NoneA SUFFOCATING SMELL OF NITROGEN FILLS THE AIR ...A suffocating smell of nitrogen fills the air....0.0000000.0000000.0000001.000000False
..............................
79314.580NoneIN A SUNSET GLOWING OF CRIMSON AND GOLD SHE LI...In a sunset0.9062500.9062500.9062500.093750False
1708.740NoneRUTH WAS GLAD TO HEAR THAT PHILIP HAD MADE A P...Ruth was0.9310340.9310340.9310340.068966False
8189.870NoneI'LL GLADLY DO THAT PROMISED THE NEW BOOLOOROO...I'll0.9333330.9333330.9333330.066667False
3712.440NoneCONSEIL WAS MY MANSERVANTCosse was my man's servant.1.0000000.6666670.8333330.166667False
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.5000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro-warm1000.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:36<00:00] +
+ +
+
+
WER: 7.23%
+WER (w/o hallucinations): 7.23%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
7606.370NoneTHERE CAME UPON ME A SUDDEN SHOCK WHEN I HEARD...There came upon me a sudden shock when I heard...0.0000000.0000000.0000001.000000False
3686.190NoneEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...Even so, I had just returned from an arduous j...0.0000000.0000000.0000001.000000False
5865.515NoneTHERE'S A HEAVY STORM COMING ON I CRIED POINTI...There's a heavy storm coming on, I cried, poin...0.0000000.0000000.0000001.000000False
3663.615NoneCHAPTER THREE AS MASTER WISHESChapter 3 As Master Wishes0.0000000.0000000.0000001.000000False
3655.780NoneI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...I will show you what a good job I did, and she...0.0000000.0000000.0000001.000000False
..............................
6945.965NoneI EXPECT YOU HAVE BEEN A VERY GOOD GIRL ANDELL...I0.9333330.9333330.9333330.066667False
88113.950NoneWE BELIEVE IN A LITERAL RESURRECTION AND AN AC...We believe that we are the most important ones.0.9444440.9444440.9876540.012346False
1062.020NoneSQUEAK SQUEAKQuick, quick!1.0000001.0000001.0000000.000000False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667False
5382.215NoneSTEPHANOS DEDALOSStefano's dead loss.2.0000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro-warm1000-2.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:35<00:00] +
+ +
+
+
WER: 6.47%
+WER (w/o hallucinations): 6.47%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.00.0000000.0000001.000000False
70214.175NoneAND THIS METHOD OF TREATING THE CASE WAS MUCH ...And this method of treating the case was much ...0.00.0000000.0000001.000000False
7034.775NoneNATURE OF THE EFFECT PRODUCED BY EARLY IMPRESS...Nature of the Effect produced by Early Impress...0.00.0000000.0000001.000000False
3773.910NoneHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...He went here, there, and everywhere in perfect...0.00.0000000.0000001.000000False
3768.340NoneNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...Never did he object to buckling up his suitcas...0.00.0000000.0000001.000000False
..............................
3712.440NoneCONSEIL WAS MY MANSERVANTCossé was my man's servant.1.00.6666670.8333330.166667False
3223.200NoneI NOW USE THEM AS ORNAMENTAL STATUARY IN MY GA...and1.01.0000001.0000000.000000False
6523.475NoneI AM SO VERY TIRED OF BEING ALL ALONE HEREand1.01.0000001.0000000.000000False
5555.815NoneBUT THE DUSK DEEPENING IN THE SCHOOLROOM COVER...and1.01.0000001.0000000.000000False
5382.215NoneSTEPHANOS DEDALOSStaphano's dead loss.2.01.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:33<00:00] +
+ +
+
+
WER: 5.93%
+WER (w/o hallucinations): 5.93%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
7813.050NoneWHEN DO YOU INTEND THAT THE JOHN BRIGHT SHALL ...When do you intend that the John Bright shall ...0.0000000.0000000.0000001.000000False
3882.355NoneWE'RE LEAVING ON THE ABRAHAM LINCOLNWe're leaving on the Abraham Lincoln.0.0000000.0000000.0000001.000000False
3872.735NoneA ROUTE SLIGHTLY LESS DIRECT THAT'S ALLa route slightly less direct. That's all.0.0000000.0000000.0000001.000000False
3865.915NoneYES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...Yes, we are. Certainly, I replied evasively, b...0.0000000.0000000.0000001.000000False
3854.530NoneANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WH...Anyhow, we'll leave instructions to ship the w...0.0000000.0000000.0000001.000000False
..............................
5243.195NoneBROTHER MAC ARDLE BROTHER KEOGHBrother McCarle, Brother Kioff.0.6000000.6000000.8000000.200000False
5921.805NoneHANS STIRS NOTHans-Stirrsnacht.0.6666670.6666670.8333330.166667False
7662.540NoneYOU PROPOSE TO KIDNAP ME I SAIDYou proposed a kenatmi set.0.8571430.8571430.9714290.028571False
5382.215NoneSTEPHANOS DEDALOSSteffano Staedalus1.0000001.0000001.0000000.000000False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
ax = _14.plot.scatter('secs', 'wer', alpha=.2)
+ax.set_ylim(0, 1.5)
+
+
+
+

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-60k.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:30<00:00] +
+ +
+
+
WER: 9.34%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
6463.385NoneI ALMOST THINK I CAN REMEMBER FEELING A LITTLE...I almost think I can remember feeling a little...0.0000000.0000000.0000001.000000
8626.720NoneTO THE FERVENT LATTER DAY SAINT A TEMPLE IS NO...To the fervent Latter-day Saint, a temple is n...0.0000000.0000000.0000001.000000
3702.340NoneBUT NOW NOTHING COULD HOLD ME BACKBut now nothing could hold me back.0.0000000.0000000.0000001.000000
3699.340NoneI WANTED NOTHING MORE THAN TO SEE MY COUNTRY A...I wanted nothing more than to see my country a...0.0000000.0000000.0000001.000000
3686.190NoneEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...Even so, I had just returned from an arduous j...0.0000000.0000000.0000001.000000
...........................
6110.250NoneIN WINTER WHEN THE SNOW LAY GLITTERING ON THE ...In winter, when the snow lay glittering on the...1.7916670.6515150.6660350.333965
46812.250NoneI HAVE GREAT THINGS TO TELL YOU SENOR SAID DON...I have great things to tell you, Senor, sadona...1.8611110.6767680.7126820.287318
55815.720NoneIT WAS STRANGE TOO THAT HE FOUND AN ARID PLEAS...It was strange, too, that he found an arid ple...2.3170730.6985290.6985290.301471
77013.960NoneWHAT WORLD WIDE INIQUITY SUCH A SPEECH AS THAT...What worldwide iniquity such a speech as that ...2.3750000.7196970.7387400.261260
44412.475NoneTHEY DREW THEIR SWORDS HID THEIR FACES IN THE ...They drew their swords, hid their faces in the...4.2000000.8076920.8076920.192308
+ +

1000 rows × 8 columns

+
+
+
+
+
+
test_model("vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-eqvad.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:38<00:00] +
+ +
+
+
WER: 7.47%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwip
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000
67312.130NoneTHE PRINCESS CERTAINLY WAS BEAUTIFUL AND HE WO...The princess certainly was beautiful, and he w...0.0000000.0000000.0000001.000000
6742.295NoneHE ONLY SHOOK HIS HEADHe only shook his head.0.0000000.0000000.0000001.000000
3552.885NoneI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZI'm afraid I don't know much about the land of...0.0000000.0000000.0000001.000000
3535.870NoneTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...The first lot we tested on our glass cat, whic...0.0000000.0000000.0000001.000000
...........................
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667
5382.215NoneSTEPHANOS DEDALOSStefano Stettelos.1.0000001.0000001.0000000.000000
5921.805NoneHANS STIRS NOTHonsters, nod.1.0000001.0000001.0000000.000000
1463.260NoneWHERE THEE AND THY FAMILY ARE KNOWNWhere's D and I-F where's D and I-F are known?1.4285710.7142860.8367350.163265
99619.915NoneEDISON HAD INSTALLED HIS HISTORIC FIRST GREAT ...Edison had installed his historic first-grade ...3.2083330.7661690.7710410.228959
+ +

1000 rows × 8 columns

+
+
+
+
+
+
ax = _8.plot.scatter('secs', 'wer', alpha=.2)
+ax.set_ylim(0, 1.5)
+
+
+
+

+
+
+
+
+
+
ax = _15['secs'].hist()
+ax.set_yscale('log')
+
+
+
+

+
+
+
+
+
+
plt.plot(_15['secs'], 1/_15['gt_text'].str.split('\w+').str.len(), '.')
+
+
+
+

+
+
+
+
+
+
# the reproducibility got pretty low ;)
+for i in range(4):
+    print(i)
+    test_model(f"test-run-{i}.model")
+    print()
+
+
0
+WER: 6.37%
+WER (w/o hallucinations): 6.37%
+
+1
+WER: 10.69%
+WER (w/o hallucinations): 9.89%
+
+2
+WER: 12.34%
+WER (w/o hallucinations): 11.79%
+
+3
+WER: 15.83%
+WER (w/o hallucinations): 15.30%
+
+
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:33<00:00] +
+ +
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:28<00:00] +
+ +
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:34<00:00] +
+ +
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:31<00:00] +
+ +
+
+
+
test_model("test-run-warm1000.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:31<00:00] +
+ +
+
+
WER: 8.81%
+WER (w/o hallucinations): 8.81%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
3686.190NoneEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...Even so, I had just returned from an arduous j...0.0000000.0000000.0000001.000000False
6914.985NoneTO GIVE AN IDEA OF THESE CONVERSATIONS I WILL ...To give an idea of these conversations, I will...0.0000000.0000000.0000001.000000False
3663.615NoneCHAPTER THREE AS MASTER WISHESChapter 3 As Master Wishes0.0000000.0000000.0000001.000000False
3655.780NoneI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...I will show you what a good job I did, and she...0.0000000.0000000.0000001.000000False
..............................
7921.810NoneVENICEVINIS.1.0000001.0000001.0000000.000000False
3242.700NoneASKED THE VOICE IN SCORNFUL ACCENTSAsk the voice in the voice in the voice in the...1.5000000.7500000.8750000.125000False
5382.215NoneSTEPHANOS DEDALOSStefano's dead loss.2.0000001.0000001.0000000.000000False
2616.735NoneP S PRAY SIR EXCUSE ME FOR WRITING TO YOU A SE...P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-...2.0370370.9821430.9993390.000661False
1062.020NoneSQUEAK SQUEAKIn the past, we have a question.3.5000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("test-run-1e.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:34<00:00] +
+ +
+
+
WER: 8.41%
+WER (w/o hallucinations): 8.05%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
6554.895NoneI SHALL BE PUNISHED FOR IT NOW I SUPPOSE BY BE...I shall be punished for it now, I suppose, by ...0.0000000.0000000.0000001.000000False
6573.640NoneI AM VERY TIRED OF SWIMMING ABOUT HERE O MOUSEI am very tired of swimming about here, oh mouse.0.0000000.0000000.0000001.000000False
3185.115NoneMOST PEOPLE TALK TOO MUCH SO IT IS A RELIEF TO...Most people talk too much, so it is a relief t...0.0000000.0000000.0000001.000000False
3177.920NoneHE SELECTED A SMALL GOLD BOTTLE WITH A PEPPER ...He selected a small gold bottle with a pepper ...0.0000000.0000000.0000001.000000False
..............................
54910.575NoneAT MOST BY AN ALMS GIVEN TO A BEGGAR WHOSE BLE...At most, by an alms given to a beggar whose bl...1.0000000.5000000.5000000.500000True
3996.365NoneI WAS WELL SATISFIED WITH MY CABIN WHICH WAS L...I was well satisfied with my cabin, which was ...1.0526320.5405410.5889050.411095False
5382.215NoneSTEPHANOS DEDALOSSteffinor's Daedalus.1.5000001.0000001.0000000.000000False
6594.995NoneWE WON'T TALK ABOUT HER ANY MORE IF YOU'D RATH...We won't talk about her anymore if he'd rather...1.8666670.7000000.7600000.240000True
958.800NoneTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...thought the fur tree, and believed it all, bec...4.6190480.8290600.8372000.162800False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
# but it got better after some hyperparam tuning
+test_model("vqmodel-4e-6454-hyptuned.model")
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:30<00:00] +
+ +
+
+
WER: 7.71%
+WER (w/o hallucinations): 7.71%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
4035.370NoneDEPARTING FROM FIVE HUNDRED THOUSAND THROATS T...Departing from 500,000 throats, three cheers b...0.0000000.0000000.0000001.000000False
9224.400NoneBUT HOW DID SHE MANAGE TO RENDER IT SO FASHION...But how did she manage to render it so fashion...0.0000000.0000000.0000001.000000False
6293.235NoneTWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE METwo hours afterwards, a terrible shock, awoke me.0.0000000.0000000.0000001.000000False
3552.885NoneI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZI'm afraid I don't know much about the land of...0.0000000.0000000.0000001.000000False
3535.870NoneTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...The first lot we tested on our glass cat, whic...0.0000000.0000000.0000001.000000False
..............................
8493.560NoneI HAD A NOTION IT WAS YOU MATE AS SAVED ME FRO...I'll have a note.0.9285710.8666670.9428570.057143False
74116.360NoneOF WHAT MISSUS NEVERBEND HAD GONE THROUGH IN P...Of what Mrs. N N N N N N N N N N N N N N N N N...0.9361700.9361700.9920210.007979False
3712.440NoneCONSEIL WAS MY MANSERVANTCasa was my man's servant.1.0000000.6666670.8333330.166667False
5382.215NoneSTEPHANOS DEDALOSStefano Stetelos.1.0000001.0000001.0000000.000000False
5921.805NoneHANS STIRS NOTHon Stur's Night.1.3333331.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-4e-6454-hyptuned-small.en.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:41<00:00] +
+ +
+
+
WER: 7.38%
+WER (w/o hallucinations): 7.38%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.000000.0000000.0000001.000000False
35010.680NoneAT ONE END STOOD A GREAT FIREPLACE IN WHICH A ...At one end stood a great fireplace, in which a...0.000000.0000000.0000001.000000False
3492.130NoneTHE WOMAN SEEMED THOUGHTFULThe woman seemed thoughtful.0.000000.0000000.0000001.000000False
6806.450NoneHE DARTED LIKE AN ARROW THROUGH ALL THE HALLS ...He darted like an arrow through all the halls,...0.000000.0000000.0000001.000000False
3473.665NoneOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...Ojo had never eaten such a fine meal in all hi...0.000000.0000000.0000001.000000False
..............................
5921.805NoneHANS STIRS NOTHonsters, Nod.1.000001.0000001.0000000.000000False
7921.810NoneVENICEVINUS.1.000001.0000001.0000000.000000False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.000000.6666670.8333330.166667False
5382.215NoneSTEPHANOS DEDALOSStephenos dead loss.1.500001.0000001.0000000.000000False
44015.770NoneELEVEN O'CLOCK HAD STRUCK IT WAS A FINE CLEAR ...At the time of the day, the morning of the day...4.127660.9603960.9932590.006741False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-4e-hyptuned-16gpu.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:32<00:00] +
+ +
+
+
WER: 6.01%
+WER (w/o hallucinations): 6.01%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
3901.975NoneWE DON'T KNOW WHERE IT WILL TAKE USWe don't know where it will take us.0.0000000.0000000.0000001.000000False
70813.020NoneTHE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY...The pain produced by an act of hasty and angry...0.0000000.0000000.0000001.000000False
3882.355NoneWE'RE LEAVING ON THE ABRAHAM LINCOLNWe're leaving on the Abraham Lincoln.0.0000000.0000000.0000001.000000False
3872.735NoneA ROUTE SLIGHTLY LESS DIRECT THAT'S ALLA route slightly less direct, that's all.0.0000000.0000000.0000001.000000False
..............................
9183.000NoneTHAT IS TRUE BADAUDERIEThat is true, bad old gree.0.7500000.5000000.6250000.375000False
8098.875NoneWHEN THE BLUESKINS SAW GHIP GHISIZZLE THEY RAI...Thanks for watching!0.9615380.9615380.9871790.012821False
64312.020NoneALICE TOOK UP THE FAN AND GLOVES AND AS THE HA...Thank you.1.0000001.0000001.0000000.000000False
3712.440NoneCONSEIL WAS MY MANSERVANTCosse was my man's servant.1.0000000.6666670.8333330.166667False
5382.215NoneSTEPHANOS DEDALOSStefanos de los1.5000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-4e-hyptuned-32gpu.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:32<00:00] +
+ +
+
+
WER: 5.94%
+WER (w/o hallucinations): 5.94%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
75710.030NoneTHEREFORE I FEEL MYSELF QUITE ABLE AS PRESIDEN...Therefore, I feel myself quite able, as Presid...0.000.0000000.0000001.000000False
6282.550NoneDURING HIS WATCH I SLEPTDuring his watch, I slept.0.000.0000000.0000001.000000False
7564.735NoneYOU HAVE COME TO US THREATENING US WITH ABSOLU...You have come to us threatening us with absolu...0.000.0000000.0000001.000000False
3773.910NoneHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...He went here, there, and everywhere in perfect...0.000.0000000.0000001.000000False
3768.340NoneNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...Never did he object to buckling up his suitcas...0.000.0000000.0000001.000000False
..............................
9183.000NoneTHAT IS TRUE BADAUDERIEThat is true bad-delt gree.0.750.5000000.6250000.375000False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.000.6666670.8333330.166667False
5921.805NoneHANS STIRS NOTHonsters Nied.1.001.0000001.0000000.000000False
8195.775NoneSCUSE ME SAID TROT I NEGLECTED TO TELL YOU THA...Thanks for watching.1.001.0000001.0000000.000000False
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.501.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-512c-4e-hyptuned-32gpu.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 08:52<00:00] +
+ +
+
+
WER: 7.37%
+WER (w/o hallucinations): 7.37%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
71511.340NoneTHE MOTHER IN MANAGING THE CASE IN THIS WAY RE...The mother, in managing the case in this way, ...0.00.0000000.0000001.000000False
3473.665NoneOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...Ojo had never eaten such a fine meal in all hi...0.00.0000000.0000001.000000False
86010.555NoneIT IS NOTABLE THAT THE INDIAN TRIBES HAVE GENE...It is notable that the Indian tribes have gene...0.00.0000000.0000001.000000False
6083.070NoneTHE HORIZON SEEMS EXTREMELY DISTANTThe horizon seems extremely distant.0.00.0000000.0000001.000000False
3444.275NoneI AM MY DEAR AND ALL STRANGERS ARE WELCOME TO ...I am, my dear, and all strangers are welcome t...0.00.0000000.0000001.000000False
..............................
3712.440NoneCONSEIL WAS MY MANSERVANTCosay was my man's servant.1.00.6666670.8333330.166667False
2603.155NoneWHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE ....1.01.0000001.0000000.000000False
5921.805NoneHANS STIRS NOTHonster's Night.1.01.0000001.0000000.000000False
7921.810NoneVENICEVenus.1.01.0000001.0000000.000000False
5382.215NoneSTEPHANOS DEDALOSStefanos de los.1.51.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-512c-dim64-4e-hyptuned-32gpu.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:29<00:00] +
+ +
+
+
WER: 7.13%
+WER (w/o hallucinations): 7.13%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.0000000.0000000.0000001.000000False
6806.450NoneHE DARTED LIKE AN ARROW THROUGH ALL THE HALLS ...He darted like an arrow through all the halls,...0.0000000.0000000.0000001.000000False
6825.145NoneAND ALL HIS BROTHERS AND SISTERS STOOD ROUND A...and all his brothers and sisters stood round a...0.0000000.0000000.0000001.000000False
6842.165NoneANDERS FACE GREW REDAnders face grew red.0.0000000.0000000.0000001.000000False
6852.775NoneBUT HIS MOTHER HUGGED HIM CLOSEBut his mother hugged him close.0.0000000.0000000.0000001.000000False
..............................
1062.020NoneSQUEAK SQUEAKSpeak, speak.1.0000001.0000001.0000000.000000False
3712.440NoneCONSEIL WAS MY MANSERVANTCossay was my man's servant.1.0000000.6666670.8333330.166667False
5921.805NoneHANS STIRS NOTHonsters, Nied.1.0000001.0000001.0000000.000000False
3364.835NoneFOR A LONG TIME HE HAD WISHED TO EXPLORE THE B...For a long time, you can see that the video is...1.3333330.8000000.9333330.066667False
5382.215NoneSTEPHANOS DEDALOSStefano Staedt-Loss1.5000001.0000001.0000000.000000False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-base-en+pl-512c-dim64.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 01:34<00:00] +
+ +
+
+
WER: 8.45%
+WER (w/o hallucinations): 8.45%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, while on her lap...0.00.00.01.0False
7402.715NoneBUT I MEAN TO HAVE MY INNINGS BEFORE LONGBut I mean to have my innings before long.0.00.00.01.0False
3872.735NoneA ROUTE SLIGHTLY LESS DIRECT THAT'S ALLA route slightly less direct, that's all.0.00.00.01.0False
3865.915NoneYES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...Yes, we are, certainly, I replied evasively, b...0.00.00.01.0False
7443.630NoneWHAT COULD I DO NOW BUT JUST LAY MYSELF DOWN A...What could I do now, but just lay myself down ...0.00.00.01.0False
..............................
1146.560NoneYES IN REALITY THOSE WERE HAPPY TIMESThank you.1.01.01.00.0False
1915.580NoneWHY IT'S IN MISSOURI SOMEWHERE ON THE FRONTIER...Thank you.1.01.01.00.0False
5382.215NoneSTEPHANOS DEDALOSStefano Stedilos1.01.01.00.0False
161.695NoneFAREWELL MADAMFair Well, Madame.1.51.01.00.0False
1062.020NoneSQUEAK SQUEAKS'quik, s'quik !2.01.01.00.0False
+ +

1000 rows × 9 columns

+
+
+
+
+
+
test_model("vqmodel-medium-en+pl-512c-dim64.model", N=1000)
+
+ + +
+
+ +
+ + 100.00% [1000/1000 06:09<00:00] +
+ +
+
+
WER: 7.34%
+WER (w/o hallucinations): 6.62%
+
+
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
secsidxgt_texttextwermerwilwiphallucination
08.230NoneAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...And often has my mother said, While on her lap...0.0000000.0000000.0000001.000000False
5716.615NoneSTEPHEN'S HEART BEGAN SLOWLY TO FOLD AND FADE ...Stephen's heart began slowly to fold and fade ...0.0000000.0000000.0000001.000000False
5724.090NoneHE IS CALLED AS YOU KNOW THE APOSTLE OF THE IN...He is called, as you know, the Apostle of the ...0.0000000.0000000.0000001.000000False
5733.330NoneA GREAT SAINT SAINT FRANCIS XAVIERA great saint, St. Francis Xavier.0.0000000.0000000.0000001.000000False
5753.445NoneHE HAD THE FAITH IN HIM THAT MOVES MOUNTAINSHe had the faith in him that moves mountains.0.0000000.0000000.0000001.000000False
..............................
764.110NoneREJOICE IN THY OWN FRESH YOUTHRead more at www.BritishMedia.com1.0000001.0000001.0000000.000000False
6313.950NoneTO GROW AND GROW TO GET OLDER AND BE TALL THOU.... . . . . . . . . . . . . . . . . . . . . . . ...1.0000001.0000001.0000000.000000False
8195.775NoneSCUSE ME SAID TROT I NEGLECTED TO TELL YOU THA...Thanks for watching!1.0000001.0000001.0000000.000000False
958.800NoneTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...thought the fur tree, and believed it all, bec...2.0476190.6825400.6976570.302343False
6546.200NoneI WISH I HADN'T CRIED SO MUCH SAID ALICE AS SH..."'I wish I hadn't cried so much,' said Alice, ...6.9000000.8734180.8734180.126582True
+ +

1000 rows × 9 columns

+
+
+
+
+
+
def show_stat(stats, i):
+    row = stats.loc[i]
+    print('WER: ', row['wer'])
+    print('GT:  ', row['gt_text'])
+    print('GEN: ', row['text'])
+
+
+
show_stat(_18, 654)
+
+
WER:  6.9
+GT:   I WISH I HADN'T CRIED SO MUCH SAID ALICE AS SHE SWAM ABOUT TRYING TO FIND HER WAY OUT
+GEN:  "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. "'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her
+
+
+
+
show_stat(_18, 819)
+
+
WER:  1.0
+GT:   SCUSE ME SAID TROT I NEGLECTED TO TELL YOU THAT YOU'RE NOT THE BOOLOOROO ANY MORE
+GEN:  Thanks for watching!
+
+
+
+
show_stat(_13, 114)
+
+
WER:  1.0
+GT:   YES IN REALITY THOSE WERE HAPPY TIMES
+GEN:  Thank you.
+
+
+ + +
+
+
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/3D. Split out validation.html b/3D. Split out validation.html new file mode 100644 index 0000000..052e29f --- /dev/null +++ b/3D. Split out validation.html @@ -0,0 +1,1058 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+
ds = wds.WebDataset(utils.shard_glob('../wolnelektury-wds2/wolnelektury-eqvad-000000.tar.gz'))
+
+
+
for s in ds: break
+s.keys()
+
+
dict_keys(['__key__', '__url__', 'spk_emb.npy', 'vad.npy'])
+
+
+
+
split_dataset('../wolnelektury-wds2/wolnelektury-eqvad-stoks-*.tar.gz', '../wolnelektury-wds2/validation-eqvad')
+
+
['../wolnelektury-wds2/wolnelektury-eqvad-stoks-000014.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000008.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000010.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000004.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000011.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000003.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000002.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000007.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000013.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000005.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000009.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000000.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000006.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000012.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000001.tar.gz']
+{'../wolnelektury-wds2/validation-eqvad': <webdataset.writer.TarWriter object>}
+Looking for 520 samples...
+
+
+ + +
+
+ +
+ + 100.00% [520/520 00:01<00:00] +
+ +
+
+
+
split_dataset('whisperspeech-s2a-512c-tts-r/*.tar.gz', 's2a-dim64-ttsr-valfix')
+
+
# writing s2a-dim64-ttsr-valfix/train-000000.tar.gz 0 0.0 GB 00350 00:13<00:00]
+# writing s2a-dim64-ttsr-valfix/train-000001.tar.gz 400 0.0 GB 4000<04:44]]
+# writing s2a-dim64-ttsr-valfix/train-000002.tar.gz 400 0.0 GB 8000<02:47]
+# writing s2a-dim64-ttsr-valfix/train-000003.tar.gz 400 0.0 GB 12000<02:12]
+# writing s2a-dim64-ttsr-valfix/train-000004.tar.gz 400 0.0 GB 16001<02:01]
+# writing s2a-dim64-ttsr-valfix/train-000005.tar.gz 400 0.0 GB 20001<01:45]
+# writing s2a-dim64-ttsr-valfix/train-000006.tar.gz 400 0.0 GB 24001<01:39]
+# writing s2a-dim64-ttsr-valfix/train-000007.tar.gz 400 0.0 GB 28001<01:35]
+# writing s2a-dim64-ttsr-valfix/train-000008.tar.gz 400 0.0 GB 32001<01:32]
+# writing s2a-dim64-ttsr-valfix/train-000009.tar.gz 400 0.0 GB 36001<01:26]
+# writing s2a-dim64-ttsr-valfix/train-000010.tar.gz 400 0.0 GB 40002<01:24]
+# writing s2a-dim64-ttsr-valfix/train-000011.tar.gz 400 0.0 GB 44002<01:22]
+# writing s2a-dim64-ttsr-valfix/train-000012.tar.gz 400 0.0 GB 48002<01:21]
+# writing s2a-dim64-ttsr-valfix/train-000013.tar.gz 400 0.0 GB 52002<01:19]
+# writing s2a-dim64-ttsr-valfix/train-000014.tar.gz 400 0.0 GB 56002<01:18]
+# writing s2a-dim64-ttsr-valfix/train-000015.tar.gz 400 0.0 GB 60002<01:17]
+# writing s2a-dim64-ttsr-valfix/train-000016.tar.gz 400 0.0 GB 64003<01:16]
+# writing s2a-dim64-ttsr-valfix/train-000017.tar.gz 400 0.0 GB 68003<01:15]
+# writing s2a-dim64-ttsr-valfix/train-000018.tar.gz 400 0.0 GB 72003<01:15]
+# writing s2a-dim64-ttsr-valfix/train-000019.tar.gz 400 0.0 GB 76003<01:14]
+# writing s2a-dim64-ttsr-valfix/train-000020.tar.gz 400 0.0 GB 80003<01:13]
+# writing s2a-dim64-ttsr-valfix/train-000021.tar.gz 400 0.0 GB 84003<01:13]
+# writing s2a-dim64-ttsr-valfix/train-000022.tar.gz 400 0.0 GB 88004<01:12]
+# writing s2a-dim64-ttsr-valfix/train-000023.tar.gz 400 0.0 GB 92004<01:11]
+# writing s2a-dim64-ttsr-valfix/train-000024.tar.gz 400 0.0 GB 96004<01:11]
+# writing s2a-dim64-ttsr-valfix/train-000025.tar.gz 400 0.0 GB 10000<01:10]
+# writing s2a-dim64-ttsr-valfix/train-000026.tar.gz 400 0.0 GB 104004<01:10]
+# writing s2a-dim64-ttsr-valfix/train-000027.tar.gz 400 0.0 GB 108005<01:09]
+# writing s2a-dim64-ttsr-valfix/train-000028.tar.gz 400 0.0 GB 112005<01:09]
+# writing s2a-dim64-ttsr-valfix/train-000029.tar.gz 400 0.0 GB 116005<01:09]
+# writing s2a-dim64-ttsr-valfix/train-000030.tar.gz 400 0.0 GB 12000
+# writing s2a-dim64-ttsr-valfix/train-000031.tar.gz 400 0.0 GB 124005<01:08]
+# writing s2a-dim64-ttsr-valfix/train-000032.tar.gz 400 0.0 GB 128005<01:08]
+# writing s2a-dim64-ttsr-valfix/train-000033.tar.gz 400 0.0 GB 132005<01:07]
+# writing s2a-dim64-ttsr-valfix/train-000034.tar.gz 400 0.0 GB 136006<01:07]
+# writing s2a-dim64-ttsr-valfix/train-000035.tar.gz 400 0.0 GB 140006<01:07]
+# writing s2a-dim64-ttsr-valfix/train-000036.tar.gz 400 0.0 GB 144006<01:07]
+# writing s2a-dim64-ttsr-valfix/train-000037.tar.gz 400 0.0 GB 148006<01:06]
+# writing s2a-dim64-ttsr-valfix/train-000038.tar.gz 400 0.0 GB 152006<01:06]
+# writing s2a-dim64-ttsr-valfix/train-000039.tar.gz 400 0.0 GB 156007<01:06]
+# writing s2a-dim64-ttsr-valfix/train-000040.tar.gz 400 0.0 GB 160007<01:05]
+# writing s2a-dim64-ttsr-valfix/train-000041.tar.gz 400 0.0 GB 1640007<01:05]
+# writing s2a-dim64-ttsr-valfix/train-000042.tar.gz 400 0.0 GB 1680007<01:05]
+# writing s2a-dim64-ttsr-valfix/train-000043.tar.gz 400 0.0 GB 1720007<01:04]
+# writing s2a-dim64-ttsr-valfix/train-000044.tar.gz 400 0.0 GB 17600
+# writing s2a-dim64-ttsr-valfix/train-000045.tar.gz 400 0.0 GB 1800008<01:04]
+# writing s2a-dim64-ttsr-valfix/train-000046.tar.gz 400 0.0 GB 1840008<01:04]
+# writing s2a-dim64-ttsr-valfix/train-000047.tar.gz 400 0.0 GB 1880008<01:03]
+# writing s2a-dim64-ttsr-valfix/train-000048.tar.gz 400 0.0 GB 1920008<01:03]
+# writing s2a-dim64-ttsr-valfix/train-000049.tar.gz 400 0.0 GB 1960008<01:03]
+# writing s2a-dim64-ttsr-valfix/train-000050.tar.gz 400 0.0 GB 2000009<01:03]
+# writing s2a-dim64-ttsr-valfix/train-000051.tar.gz 400 0.0 GB 2040009<01:02]
+# writing s2a-dim64-ttsr-valfix/train-000052.tar.gz 400 0.0 GB 2080009<01:02]
+# writing s2a-dim64-ttsr-valfix/train-000053.tar.gz 400 0.0 GB 2120009<01:02]
+# writing s2a-dim64-ttsr-valfix/train-000054.tar.gz 400 0.0 GB 21600
+# writing s2a-dim64-ttsr-valfix/train-000055.tar.gz 400 0.0 GB 2200009<01:02]
+# writing s2a-dim64-ttsr-valfix/train-000056.tar.gz 400 0.0 GB 2240009<01:01]
+# writing s2a-dim64-ttsr-valfix/train-000057.tar.gz 400 0.0 GB 2280010<01:01]
+# writing s2a-dim64-ttsr-valfix/train-000058.tar.gz 400 0.0 GB 2320010<01:01]
+# writing s2a-dim64-ttsr-valfix/train-000059.tar.gz 400 0.0 GB 2360010<01:01]
+# writing s2a-dim64-ttsr-valfix/train-000060.tar.gz 400 0.0 GB 2400010<01:00]
+# writing s2a-dim64-ttsr-valfix/train-000061.tar.gz 400 0.0 GB 2440010<01:00]
+# writing s2a-dim64-ttsr-valfix/train-000062.tar.gz 400 0.0 GB 2480011<01:00]
+# writing s2a-dim64-ttsr-valfix/train-000063.tar.gz 400 0.0 GB 2520011<01:00]
+# writing s2a-dim64-ttsr-valfix/train-000064.tar.gz 400 0.0 GB 25600
+# writing s2a-dim64-ttsr-valfix/train-000065.tar.gz 400 0.0 GB 2600011<00:59]
+# writing s2a-dim64-ttsr-valfix/train-000066.tar.gz 400 0.0 GB 2640011<00:59]
+# writing s2a-dim64-ttsr-valfix/train-000067.tar.gz 400 0.0 GB 2680011<00:59]
+# writing s2a-dim64-ttsr-valfix/train-000068.tar.gz 400 0.0 GB 2720012<00:59]
+# writing s2a-dim64-ttsr-valfix/train-000069.tar.gz 400 0.0 GB 2760012<00:59]
+# writing s2a-dim64-ttsr-valfix/train-000070.tar.gz 400 0.0 GB 2800012<00:58]
+# writing s2a-dim64-ttsr-valfix/train-000071.tar.gz 400 0.0 GB 2840012<00:58]
+# writing s2a-dim64-ttsr-valfix/train-000072.tar.gz 400 0.0 GB 2880012<00:58]
+# writing s2a-dim64-ttsr-valfix/train-000073.tar.gz 400 0.0 GB 2920013<00:58]
+# writing s2a-dim64-ttsr-valfix/train-000074.tar.gz 400 0.0 GB 29600
+# writing s2a-dim64-ttsr-valfix/train-000075.tar.gz 400 0.0 GB 3000013<00:57]
+# writing s2a-dim64-ttsr-valfix/train-000076.tar.gz 400 0.0 GB 3040013<00:57]
+# writing s2a-dim64-ttsr-valfix/train-000077.tar.gz 400 0.0 GB 3080013<00:57]
+# writing s2a-dim64-ttsr-valfix/train-000078.tar.gz 400 0.0 GB 3120013<00:57]
+# writing s2a-dim64-ttsr-valfix/train-000079.tar.gz 400 0.0 GB 3160014<00:56]
+# writing s2a-dim64-ttsr-valfix/train-000080.tar.gz 400 0.0 GB 3200014<00:56]
+# writing s2a-dim64-ttsr-valfix/train-000081.tar.gz 400 0.0 GB 3240014<00:56]
+# writing s2a-dim64-ttsr-valfix/train-000082.tar.gz 400 0.0 GB 3280014<00:56]
+# writing s2a-dim64-ttsr-valfix/train-000083.tar.gz 400 0.0 GB 33200
+# writing s2a-dim64-ttsr-valfix/train-000084.tar.gz 400 0.0 GB 3360014<00:56]
+# writing s2a-dim64-ttsr-valfix/train-000085.tar.gz 400 0.0 GB 3400014<00:55]
+# writing s2a-dim64-ttsr-valfix/train-000086.tar.gz 400 0.0 GB 3440015<00:55]
+# writing s2a-dim64-ttsr-valfix/train-000087.tar.gz 400 0.0 GB 3480015<00:55]
+# writing s2a-dim64-ttsr-valfix/train-000088.tar.gz 400 0.0 GB 3520015<00:55]
+# writing s2a-dim64-ttsr-valfix/train-000089.tar.gz 400 0.0 GB 3560015<00:54]
+# writing s2a-dim64-ttsr-valfix/train-000090.tar.gz 400 0.0 GB 3600015<00:54]
+# writing s2a-dim64-ttsr-valfix/train-000091.tar.gz 400 0.0 GB 36400
+# writing s2a-dim64-ttsr-valfix/train-000092.tar.gz 400 0.0 GB 3680016<00:54]
+# writing s2a-dim64-ttsr-valfix/train-000093.tar.gz 400 0.0 GB 3720016<00:54]
+# writing s2a-dim64-ttsr-valfix/train-000094.tar.gz 400 0.0 GB 3760016<00:53]
+# writing s2a-dim64-ttsr-valfix/train-000095.tar.gz 400 0.0 GB 3800016<00:53]
+# writing s2a-dim64-ttsr-valfix/train-000096.tar.gz 400 0.0 GB 3840016<00:53]
+# writing s2a-dim64-ttsr-valfix/train-000097.tar.gz 400 0.0 GB 3880017<00:53]
+# writing s2a-dim64-ttsr-valfix/train-000098.tar.gz 400 0.0 GB 3920017<00:52]
+# writing s2a-dim64-ttsr-valfix/train-000099.tar.gz 400 0.0 GB 3960017<00:52]
+# writing s2a-dim64-ttsr-valfix/train-000100.tar.gz 400 0.0 GB 40000
+# writing s2a-dim64-ttsr-valfix/train-000101.tar.gz 400 0.0 GB 4040017<00:52]
+# writing s2a-dim64-ttsr-valfix/train-000102.tar.gz 400 0.0 GB 4080017<00:52]
+# writing s2a-dim64-ttsr-valfix/train-000103.tar.gz 400 0.0 GB 4120018<00:52]
+# writing s2a-dim64-ttsr-valfix/train-000104.tar.gz 400 0.0 GB 4160018<00:51]
+# writing s2a-dim64-ttsr-valfix/train-000105.tar.gz 400 0.0 GB 4200018<00:51]
+# writing s2a-dim64-ttsr-valfix/train-000106.tar.gz 400 0.0 GB 4240018<00:51]
+# writing s2a-dim64-ttsr-valfix/train-000107.tar.gz 400 0.0 GB 4280018<00:51]
+# writing s2a-dim64-ttsr-valfix/train-000108.tar.gz 400 0.0 GB 43200
+# writing s2a-dim64-ttsr-valfix/train-000109.tar.gz 400 0.0 GB 4360019<00:51]
+# writing s2a-dim64-ttsr-valfix/train-000110.tar.gz 400 0.0 GB 4400019<00:50]
+# writing s2a-dim64-ttsr-valfix/train-000111.tar.gz 400 0.0 GB 4440019<00:50]
+# writing s2a-dim64-ttsr-valfix/train-000112.tar.gz 400 0.0 GB 4480019<00:50]
+# writing s2a-dim64-ttsr-valfix/train-000113.tar.gz 400 0.0 GB 4520019<00:50]
+# writing s2a-dim64-ttsr-valfix/train-000114.tar.gz 400 0.0 GB 4560020<00:50]
+# writing s2a-dim64-ttsr-valfix/train-000115.tar.gz 400 0.0 GB 4600020<00:49]
+# writing s2a-dim64-ttsr-valfix/train-000116.tar.gz 400 0.0 GB 4640020<00:49]
+# writing s2a-dim64-ttsr-valfix/train-000117.tar.gz 400 0.0 GB 46800
+# writing s2a-dim64-ttsr-valfix/train-000118.tar.gz 400 0.0 GB 4720020<00:49]
+# writing s2a-dim64-ttsr-valfix/train-000119.tar.gz 400 0.0 GB 4760020<00:49]
+# writing s2a-dim64-ttsr-valfix/train-000120.tar.gz 400 0.0 GB 4800021<00:49]
+# writing s2a-dim64-ttsr-valfix/train-000121.tar.gz 400 0.0 GB 4840021<00:48]
+# writing s2a-dim64-ttsr-valfix/train-000122.tar.gz 400 0.0 GB 4880021<00:48]
+# writing s2a-dim64-ttsr-valfix/train-000123.tar.gz 400 0.0 GB 4920021<00:48]
+# writing s2a-dim64-ttsr-valfix/train-000124.tar.gz 400 0.0 GB 4960021<00:48]
+# writing s2a-dim64-ttsr-valfix/train-000125.tar.gz 400 0.0 GB 50000
+# writing s2a-dim64-ttsr-valfix/train-000126.tar.gz 400 0.0 GB 5040022<00:48]
+# writing s2a-dim64-ttsr-valfix/train-000127.tar.gz 400 0.0 GB 5080022<00:47]
+# writing s2a-dim64-ttsr-valfix/train-000128.tar.gz 400 0.0 GB 5120022<00:47]
+# writing s2a-dim64-ttsr-valfix/train-000129.tar.gz 400 0.0 GB 5160022<00:47]
+# writing s2a-dim64-ttsr-valfix/train-000130.tar.gz 400 0.0 GB 5200022<00:47]
+# writing s2a-dim64-ttsr-valfix/train-000131.tar.gz 400 0.0 GB 5240022<00:47]
+# writing s2a-dim64-ttsr-valfix/train-000132.tar.gz 400 0.0 GB 5280023<00:46]
+# writing s2a-dim64-ttsr-valfix/train-000133.tar.gz 400 0.0 GB 53200
+# writing s2a-dim64-ttsr-valfix/train-000134.tar.gz 400 0.0 GB 5360023<00:46]
+# writing s2a-dim64-ttsr-valfix/train-000135.tar.gz 400 0.0 GB 5400023<00:46]
+# writing s2a-dim64-ttsr-valfix/train-000136.tar.gz 400 0.0 GB 5440023<00:46]
+# writing s2a-dim64-ttsr-valfix/train-000137.tar.gz 400 0.0 GB 5480023<00:46]
+# writing s2a-dim64-ttsr-valfix/train-000138.tar.gz 400 0.0 GB 5520024<00:45]
+# writing s2a-dim64-ttsr-valfix/train-000139.tar.gz 400 0.0 GB 5560024<00:45]
+# writing s2a-dim64-ttsr-valfix/train-000140.tar.gz 400 0.0 GB 5600024<00:45]
+# writing s2a-dim64-ttsr-valfix/train-000141.tar.gz 400 0.0 GB 5640024<00:45]
+# writing s2a-dim64-ttsr-valfix/train-000142.tar.gz 400 0.0 GB 56800
+# writing s2a-dim64-ttsr-valfix/train-000143.tar.gz 400 0.0 GB 5720024<00:45]
+# writing s2a-dim64-ttsr-valfix/train-000144.tar.gz 400 0.0 GB 5760025<00:44]
+# writing s2a-dim64-ttsr-valfix/train-000145.tar.gz 400 0.0 GB 5800025<00:44]
+# writing s2a-dim64-ttsr-valfix/train-000146.tar.gz 400 0.0 GB 5840025<00:44]
+# writing s2a-dim64-ttsr-valfix/train-000147.tar.gz 400 0.0 GB 5880025<00:44]
+# writing s2a-dim64-ttsr-valfix/train-000148.tar.gz 400 0.0 GB 5920025<00:44]
+# writing s2a-dim64-ttsr-valfix/train-000149.tar.gz 400 0.0 GB 5960026<00:43]
+# writing s2a-dim64-ttsr-valfix/train-000150.tar.gz 400 0.0 GB 6000026<00:43]
+# writing s2a-dim64-ttsr-valfix/train-000151.tar.gz 400 0.0 GB 60400
+# writing s2a-dim64-ttsr-valfix/train-000152.tar.gz 400 0.0 GB 6080026<00:43]
+# writing s2a-dim64-ttsr-valfix/train-000153.tar.gz 400 0.0 GB 6120026<00:43]
+# writing s2a-dim64-ttsr-valfix/train-000154.tar.gz 400 0.0 GB 6160026<00:43]
+# writing s2a-dim64-ttsr-valfix/train-000155.tar.gz 400 0.0 GB 6200027<00:42]
+# writing s2a-dim64-ttsr-valfix/train-000156.tar.gz 400 0.0 GB 6240027<00:42]
+# writing s2a-dim64-ttsr-valfix/train-000157.tar.gz 400 0.0 GB 6280027<00:42]
+# writing s2a-dim64-ttsr-valfix/train-000158.tar.gz 400 0.0 GB 6320027<00:42]
+# writing s2a-dim64-ttsr-valfix/train-000159.tar.gz 400 0.0 GB 63600
+# writing s2a-dim64-ttsr-valfix/train-000160.tar.gz 400 0.0 GB 6400027<00:42]
+# writing s2a-dim64-ttsr-valfix/train-000161.tar.gz 400 0.0 GB 6440028<00:41]
+# writing s2a-dim64-ttsr-valfix/train-000162.tar.gz 400 0.0 GB 6480028<00:41]
+# writing s2a-dim64-ttsr-valfix/train-000163.tar.gz 400 0.0 GB 6520028<00:41]
+# writing s2a-dim64-ttsr-valfix/train-000164.tar.gz 400 0.0 GB 6560028<00:41]
+# writing s2a-dim64-ttsr-valfix/train-000165.tar.gz 400 0.0 GB 6600028<00:40]
+# writing s2a-dim64-ttsr-valfix/train-000166.tar.gz 400 0.0 GB 6640029<00:40]
+# writing s2a-dim64-ttsr-valfix/train-000167.tar.gz 400 0.0 GB 66800
+# writing s2a-dim64-ttsr-valfix/train-000168.tar.gz 400 0.0 GB 6720029<00:40]
+# writing s2a-dim64-ttsr-valfix/train-000169.tar.gz 400 0.0 GB 6760029<00:40]
+# writing s2a-dim64-ttsr-valfix/train-000170.tar.gz 400 0.0 GB 6800029<00:40]
+# writing s2a-dim64-ttsr-valfix/train-000171.tar.gz 400 0.0 GB 6840029<00:39]
+# writing s2a-dim64-ttsr-valfix/train-000172.tar.gz 400 0.0 GB 6880030<00:39]
+# writing s2a-dim64-ttsr-valfix/train-000173.tar.gz 400 0.0 GB 6920030<00:39]
+# writing s2a-dim64-ttsr-valfix/train-000174.tar.gz 400 0.0 GB 6960030<00:39]
+# writing s2a-dim64-ttsr-valfix/train-000175.tar.gz 400 0.0 GB 7000030<00:39]
+# writing s2a-dim64-ttsr-valfix/train-000176.tar.gz 400 0.0 GB 70400
+# writing s2a-dim64-ttsr-valfix/train-000177.tar.gz 400 0.0 GB 7080030<00:38]
+# writing s2a-dim64-ttsr-valfix/train-000178.tar.gz 400 0.0 GB 7120031<00:38]
+# writing s2a-dim64-ttsr-valfix/train-000179.tar.gz 400 0.0 GB 7160031<00:38]
+# writing s2a-dim64-ttsr-valfix/train-000180.tar.gz 400 0.0 GB 7200031<00:38]
+# writing s2a-dim64-ttsr-valfix/train-000181.tar.gz 400 0.0 GB 7240031<00:38]
+# writing s2a-dim64-ttsr-valfix/train-000182.tar.gz 400 0.0 GB 7280031<00:37]
+# writing s2a-dim64-ttsr-valfix/train-000183.tar.gz 400 0.0 GB 7320032<00:37]
+# writing s2a-dim64-ttsr-valfix/train-000184.tar.gz 400 0.0 GB 73600
+# writing s2a-dim64-ttsr-valfix/train-000185.tar.gz 400 0.0 GB 7400032<00:37]
+# writing s2a-dim64-ttsr-valfix/train-000186.tar.gz 400 0.0 GB 7440032<00:37]
+# writing s2a-dim64-ttsr-valfix/train-000187.tar.gz 400 0.0 GB 7480032<00:37]
+# writing s2a-dim64-ttsr-valfix/train-000188.tar.gz 400 0.0 GB 7520032<00:36]
+# writing███████████----------------------| 47.37% [75960/160350 00:33<00:36] s2a-dim64-ttsr-valfix/train-000189.tar.gz 400 0.0 GB 75600
+# writing s2a-dim64-ttsr-valfix/train-000190.tar.gz 400 0.0 GB 7600033<00:36]
+# writing s2a-dim64-ttsr-valfix/train-000191.tar.gz 400 0.0 GB 7640033<00:36]
+# writing s2a-dim64-ttsr-valfix/train-000192.tar.gz 400 0.0 GB 76800
+# writing s2a-dim64-ttsr-valfix/train-000193.tar.gz 400 0.0 GB 7720033<00:36]
+# writing s2a-dim64-ttsr-valfix/train-000194.tar.gz 400 0.0 GB 7760033<00:35]
+# writing s2a-dim64-ttsr-valfix/train-000195.tar.gz 400 0.0 GB 7800034<00:35]
+# writing s2a-dim64-ttsr-valfix/train-000196.tar.gz 400 0.0 GB 7840034<00:35]
+# writing s2a-dim64-ttsr-valfix/train-000197.tar.gz 400 0.0 GB 7880034<00:35]
+# writing s2a-dim64-ttsr-valfix/train-000198.tar.gz 400 0.0 GB 7920034<00:35]
+# writing s2a-dim64-ttsr-valfix/train-000199.tar.gz 400 0.0 GB 7960034<00:34]
+# writing s2a-dim64-ttsr-valfix/train-000200.tar.gz 400 0.0 GB 80000
+# writing s2a-dim64-ttsr-valfix/train-000201.tar.gz 400 0.0 GB 8040035<00:34]
+# writing s2a-dim64-ttsr-valfix/train-000202.tar.gz 400 0.0 GB 8080035<00:34]
+# writing s2a-dim64-ttsr-valfix/train-000203.tar.gz 400 0.0 GB 8120035<00:34]
+# writing s2a-dim64-ttsr-valfix/train-000204.tar.gz 400 0.0 GB 8160035<00:33]
+# writing s2a-dim64-ttsr-valfix/train-000205.tar.gz 400 0.0 GB 8200035<00:33]
+# writing s2a-dim64-ttsr-valfix/train-000206.tar.gz 400 0.0 GB 8240035<00:33]
+# writing s2a-dim64-ttsr-valfix/train-000207.tar.gz 400 0.0 GB 82800
+# writing s2a-dim64-ttsr-valfix/train-000208.tar.gz 400 0.0 GB 8320035<00:33]
+# writing s2a-dim64-ttsr-valfix/train-000209.tar.gz 400 0.0 GB 8360036<00:32]
+# writing s2a-dim64-ttsr-valfix/train-000210.tar.gz 400 0.0 GB 8400036<00:32]
+# writing s2a-dim64-ttsr-valfix/train-000211.tar.gz 400 0.0 GB 8440036<00:32]
+# writing s2a-dim64-ttsr-valfix/train-000212.tar.gz 400 0.0 GB 8480036<00:32]
+# writing s2a-dim64-ttsr-valfix/train-000213.tar.gz 400 0.0 GB 8520036<00:31]
+# writing s2a-dim64-ttsr-valfix/train-000214.tar.gz 400 0.0 GB 8560036<00:31]
+# writing s2a-dim64-ttsr-valfix/train-000215.tar.gz 400 0.0 GB 86000
+# writing s2a-dim64-ttsr-valfix/train-000216.tar.gz 400 0.0 GB 8640036<00:31]
+# writing s2a-dim64-ttsr-valfix/train-000217.tar.gz 400 0.0 GB 8680036<00:31]
+# writing s2a-dim64-ttsr-valfix/train-000218.tar.gz 400 0.0 GB 8720037<00:30]
+# writing s2a-dim64-ttsr-valfix/train-000219.tar.gz 400 0.0 GB 8760037<00:30]
+# writing s2a-dim64-ttsr-valfix/train-000220.tar.gz 400 0.0 GB 8800037<00:30]
+# writing s2a-dim64-ttsr-valfix/train-000221.tar.gz 400 0.0 GB 88400
+# writing s2a-dim64-ttsr-valfix/train-000222.tar.gz 400 0.0 GB 8880037<00:30]
+# writing s2a-dim64-ttsr-valfix/train-000223.tar.gz 400 0.0 GB 8920037<00:29]
+# writing s2a-dim64-ttsr-valfix/train-000224.tar.gz 400 0.0 GB 8960037<00:29]
+# writing s2a-dim64-ttsr-valfix/train-000225.tar.gz 400 0.0 GB 9000037<00:29]
+# writing s2a-dim64-ttsr-valfix/train-000226.tar.gz 400 0.0 GB 9040038<00:29]
+# writing s2a-dim64-ttsr-valfix/train-000227.tar.gz 400 0.0 GB 9080038<00:28]
+# writing s2a-dim64-ttsr-valfix/train-000228.tar.gz 400 0.0 GB 91200
+# writing s2a-dim64-ttsr-valfix/train-000229.tar.gz 400 0.0 GB 9160038<00:28]
+# writing s2a-dim64-ttsr-valfix/train-000230.tar.gz 400 0.0 GB 9200038<00:28]
+# writing s2a-dim64-ttsr-valfix/train-000231.tar.gz 400 0.0 GB 9240038<00:28]
+# writing s2a-dim64-ttsr-valfix/train-000232.tar.gz 400 0.0 GB 9280038<00:27]
+# writing s2a-dim64-ttsr-valfix/train-000233.tar.gz 400 0.0 GB 9320038<00:27]
+# writing s2a-dim64-ttsr-valfix/train-000234.tar.gz 400 0.0 GB 93600
+# writing s2a-dim64-ttsr-valfix/train-000235.tar.gz 400 0.0 GB 9400039<00:27]
+# writing s2a-dim64-ttsr-valfix/train-000236.tar.gz 400 0.0 GB 9440039<00:27]
+# writing s2a-dim64-ttsr-valfix/train-000237.tar.gz 400 0.0 GB 9480039<00:27]
+# writing s2a-dim64-ttsr-valfix/train-000238.tar.gz 400 0.0 GB 9520039<00:26]
+# writing s2a-dim64-ttsr-valfix/train-000239.tar.gz 400 0.0 GB 9560039<00:26]
+# writing s2a-dim64-ttsr-valfix/train-000240.tar.gz 400 0.0 GB 96000
+# writing s2a-dim64-ttsr-valfix/train-000241.tar.gz 400 0.0 GB 9640039<00:26]
+# writing s2a-dim64-ttsr-valfix/train-000242.tar.gz 400 0.0 GB 9680039<00:26]
+# writing s2a-dim64-ttsr-valfix/train-000243.tar.gz 400 0.0 GB 9720040<00:25]
+# writing s2a-dim64-ttsr-valfix/train-000244.tar.gz 400 0.0 GB 9760040<00:25]
+# writing s2a-dim64-ttsr-valfix/train-000245.tar.gz 400 0.0 GB 98000
+# writing s2a-dim64-ttsr-valfix/train-000246.tar.gz 400 0.0 GB 9840040<00:25]
+# writing s2a-dim64-ttsr-valfix/train-000247.tar.gz 400 0.0 GB 9880040<00:25]
+# writing s2a-dim64-ttsr-valfix/train-000248.tar.gz 400 0.0 GB 9920040<00:24]
+# writing s2a-dim64-ttsr-valfix/train-000249.tar.gz 400 0.0 GB 9960040<00:24]
+# writing s2a-dim64-ttsr-valfix/train-000250.tar.gz 400 0.0 GB 10000041<00:24]
+# writing s2a-dim64-ttsr-valfix/train-000251.tar.gz 400 0.0 GB 100400
+# writing s2a-dim64-ttsr-valfix/train-000252.tar.gz 400 0.0 GB 10080041<00:24]
+# writing s2a-dim64-ttsr-valfix/train-000253.tar.gz 400 0.0 GB 10120041<00:23]
+# writing s2a-dim64-ttsr-valfix/train-000254.tar.gz 400 0.0 GB 10160041<00:23]
+# writing s2a-dim64-ttsr-valfix/train-000255.tar.gz 400 0.0 GB 10200041<00:23]
+# writing s2a-dim64-ttsr-valfix/train-000256.tar.gz 400 0.0 GB 102400
+# writing s2a-dim64-ttsr-valfix/train-000257.tar.gz 400 0.0 GB 10280041<00:23]
+# writing s2a-dim64-ttsr-valfix/train-000258.tar.gz 400 0.0 GB 10320041<00:23]
+# writing s2a-dim64-ttsr-valfix/train-000259.tar.gz 400 0.0 GB 10360042<00:22]
+# writing s2a-dim64-ttsr-valfix/train-000260.tar.gz 400 0.0 GB 10400042<00:22]
+# writing s2a-dim64-ttsr-valfix/train-000261.tar.gz 400 0.0 GB 10440042<00:22]
+# writing s2a-dim64-ttsr-valfix/train-000262.tar.gz 400 0.0 GB 104800
+# writing s2a-dim64-ttsr-valfix/train-000263.tar.gz 400 0.0 GB 10520042<00:22]
+# writing s2a-dim64-ttsr-valfix/train-000264.tar.gz 400 0.0 GB 10560042<00:21]
+# writing s2a-dim64-ttsr-valfix/train-000265.tar.gz 400 0.0 GB 10600042<00:21]
+# writing s2a-dim64-ttsr-valfix/train-000266.tar.gz 400 0.0 GB 10640042<00:21]
+# writing s2a-dim64-ttsr-valfix/train-000267.tar.gz 400 0.0 GB 106800
+# writing s2a-dim64-ttsr-valfix/train-000268.tar.gz 400 0.0 GB 10720043<00:21]
+# writing s2a-dim64-ttsr-valfix/train-000269.tar.gz 400 0.0 GB 10760043<00:21]
+# writing s2a-dim64-ttsr-valfix/train-000270.tar.gz 400 0.0 GB 10800043<00:20]
+# writing s2a-dim64-ttsr-valfix/train-000271.tar.gz 400 0.0 GB 10840043<00:20]
+# writing s2a-dim64-ttsr-valfix/train-000272.tar.gz 400 0.0 GB 108800
+# writing s2a-dim64-ttsr-valfix/train-000273.tar.gz 400 0.0 GB 10920043<00:20]
+# writing s2a-dim64-ttsr-valfix/train-000274.tar.gz 400 0.0 GB 10960043<00:20]
+# writing s2a-dim64-ttsr-valfix/train-000275.tar.gz 400 0.0 GB 11000043<00:19]
+# writing s2a-dim64-ttsr-valfix/train-000276.tar.gz 400 0.0 GB 11040044<00:19]
+# writing s2a-dim64-ttsr-valfix/train-000277.tar.gz 400 0.0 GB 110800
+# writing s2a-dim64-ttsr-valfix/train-000278.tar.gz 400 0.0 GB 11120044<00:19]
+# writing s2a-dim64-ttsr-valfix/train-000279.tar.gz 400 0.0 GB 11160044<00:19]
+# writing s2a-dim64-ttsr-valfix/train-000280.tar.gz 400 0.0 GB 11200044<00:19]
+# writing s2a-dim64-ttsr-valfix/train-000281.tar.gz 400 0.0 GB 11240044<00:18]
+# writing s2a-dim64-ttsr-valfix/train-000282.tar.gz 400 0.0 GB 112800
+# writing s2a-dim64-ttsr-valfix/train-000283.tar.gz 400 0.0 GB 11320044<00:18]
+# writing s2a-dim64-ttsr-valfix/train-000284.tar.gz 400 0.0 GB 11360045<00:18]
+# writing s2a-dim64-ttsr-valfix/train-000285.tar.gz 400 0.0 GB 11400045<00:18]
+# writing s2a-dim64-ttsr-valfix/train-000286.tar.gz 400 0.0 GB 114400
+# writing s2a-dim64-ttsr-valfix/train-000287.tar.gz 400 0.0 GB 11480045<00:17]
+# writing s2a-dim64-ttsr-valfix/train-000288.tar.gz 400 0.0 GB 11520045<00:17]
+# writing s2a-dim64-ttsr-valfix/train-000289.tar.gz 400 0.0 GB 11560045<00:17]
+# writing s2a-dim64-ttsr-valfix/train-000290.tar.gz 400 0.0 GB 11600045<00:17]
+# writing s2a-dim64-ttsr-valfix/train-000291.tar.gz 400 0.0 GB 116400
+# writing s2a-dim64-ttsr-valfix/train-000292.tar.gz 400 0.0 GB 11680045<00:17]
+# writing s2a-dim64-ttsr-valfix/train-000293.tar.gz 400 0.0 GB 11720046<00:16]
+# writing s2a-dim64-ttsr-valfix/train-000294.tar.gz 400 0.0 GB 11760046<00:16]
+# writing s2a-dim64-ttsr-valfix/train-000295.tar.gz 400 0.0 GB 11800046<00:16]
+# writing s2a-dim64-ttsr-valfix/train-000296.tar.gz 400 0.0 GB 118400
+# writing s2a-dim64-ttsr-valfix/train-000297.tar.gz 400 0.0 GB 11880046<00:16]
+# writing s2a-dim64-ttsr-valfix/train-000298.tar.gz 400 0.0 GB 11920046<00:15]
+# writing s2a-dim64-ttsr-valfix/train-000299.tar.gz 400 0.0 GB 11960046<00:15]
+# writing s2a-dim64-ttsr-valfix/train-000300.tar.gz 400 0.0 GB 120000
+# writing s2a-dim64-ttsr-valfix/train-000301.tar.gz 400 0.0 GB 12040047<00:15]
+# writing s2a-dim64-ttsr-valfix/train-000302.tar.gz 400 0.0 GB 12080047<00:15]
+# writing s2a-dim64-ttsr-valfix/train-000303.tar.gz 400 0.0 GB 12120047<00:15]
+# writing s2a-dim64-ttsr-valfix/train-000304.tar.gz 400 0.0 GB 12160047<00:14]
+# writing s2a-dim64-ttsr-valfix/train-000305.tar.gz 400 0.0 GB 122000
+# writing s2a-dim64-ttsr-valfix/train-000306.tar.gz 400 0.0 GB 12240047<00:14]
+# writing s2a-dim64-ttsr-valfix/train-000307.tar.gz 400 0.0 GB 12280047<00:14]
+# writing s2a-dim64-ttsr-valfix/train-000308.tar.gz 400 0.0 GB 12320047<00:14]
+# writing s2a-dim64-ttsr-valfix/train-000309.tar.gz 400 0.0 GB 12360048<00:14]
+# writing s2a-dim64-ttsr-valfix/train-000310.tar.gz 400 0.0 GB 124000
+# writing s2a-dim64-ttsr-valfix/train-000311.tar.gz 400 0.0 GB 12440048<00:13]
+# writing s2a-dim64-ttsr-valfix/train-000312.tar.gz 400 0.0 GB 12480048<00:13]
+# writing s2a-dim64-ttsr-valfix/train-000313.tar.gz 400 0.0 GB 12520048<00:13]
+# writing s2a-dim64-ttsr-valfix/train-000314.tar.gz 400 0.0 GB 125600
+# writing s2a-dim64-ttsr-valfix/train-000315.tar.gz 400 0.0 GB 12600048<00:13]
+# writing s2a-dim64-ttsr-valfix/train-000316.tar.gz 400 0.0 GB 12640048<00:13]
+# writing s2a-dim64-ttsr-valfix/train-000317.tar.gz 400 0.0 GB 12680049<00:12]
+# writing s2a-dim64-ttsr-valfix/train-000318.tar.gz 400 0.0 GB 127200
+# writing s2a-dim64-ttsr-valfix/train-000319.tar.gz 400 0.0 GB 12760049<00:12]
+# writing s2a-dim64-ttsr-valfix/train-000320.tar.gz 400 0.0 GB 12800049<00:12]
+# writing s2a-dim64-ttsr-valfix/train-000321.tar.gz 400 0.0 GB 12840049<00:12]
+# writing s2a-dim64-ttsr-valfix/train-000322.tar.gz 400 0.0 GB 12880049<00:11]
+# writing s2a-dim64-ttsr-valfix/train-000323.tar.gz 400 0.0 GB 129200
+# writing s2a-dim64-ttsr-valfix/train-000324.tar.gz 400 0.0 GB 12960049<00:11]
+# writing s2a-dim64-ttsr-valfix/train-000325.tar.gz 400 0.0 GB 13000050<00:11]
+# writing s2a-dim64-ttsr-valfix/train-000326.tar.gz 400 0.0 GB 13040050<00:11]
+# writing s2a-dim64-ttsr-valfix/train-000327.tar.gz 400 0.0 GB 130800
+# writing s2a-dim64-ttsr-valfix/train-000328.tar.gz 400 0.0 GB 13120050<00:11]
+# writing s2a-dim64-ttsr-valfix/train-000329.tar.gz 400 0.0 GB 13160050<00:10]
+# writing s2a-dim64-ttsr-valfix/train-000330.tar.gz 400 0.0 GB 13200050<00:10]
+# writing s2a-dim64-ttsr-valfix/train-000331.tar.gz 400 0.0 GB 132400
+# writing s2a-dim64-ttsr-valfix/train-000332.tar.gz 400 0.0 GB 13280050<00:10]
+# writing s2a-dim64-ttsr-valfix/train-000333.tar.gz 400 0.0 GB 13320050<00:10]
+# writing s2a-dim64-ttsr-valfix/train-000334.tar.gz 400 0.0 GB 13360051<00:10]
+# writing s2a-dim64-ttsr-valfix/train-000335.tar.gz 400 0.0 GB 13400051<00:09]
+# writing s2a-dim64-ttsr-valfix/train-000336.tar.gz 400 0.0 GB 134400
+# writing s2a-dim64-ttsr-valfix/train-000337.tar.gz 400 0.0 GB 13480051<00:09]
+# writing s2a-dim64-ttsr-valfix/train-000338.tar.gz 400 0.0 GB 13520051<00:09]
+# writing s2a-dim64-ttsr-valfix/train-000339.tar.gz 400 0.0 GB 13560051<00:09]
+# writing s2a-dim64-ttsr-valfix/train-000340.tar.gz 400 0.0 GB 136000
+# writing s2a-dim64-ttsr-valfix/train-000341.tar.gz 400 0.0 GB 13640051<00:09]
+# writing s2a-dim64-ttsr-valfix/train-000342.tar.gz 400 0.0 GB 13680052<00:08]
+# writing s2a-dim64-ttsr-valfix/train-000343.tar.gz 400 0.0 GB 13720052<00:08]
+# writing s2a-dim64-ttsr-valfix/train-000344.tar.gz 400 0.0 GB 137600
+# writing s2a-dim64-ttsr-valfix/train-000345.tar.gz 400 0.0 GB 13800052<00:08]
+# writing s2a-dim64-ttsr-valfix/train-000346.tar.gz 400 0.0 GB 13840052<00:08]
+# writing s2a-dim64-ttsr-valfix/train-000347.tar.gz 400 0.0 GB 13880052<00:08]
+# writing s2a-dim64-ttsr-valfix/train-000348.tar.gz 400 0.0 GB 139200
+# writing s2a-dim64-ttsr-valfix/train-000349.tar.gz 400 0.0 GB 13960052<00:07]
+# writing s2a-dim64-ttsr-valfix/train-000350.tar.gz 400 0.0 GB 14000053<00:07]
+# writing s2a-dim64-ttsr-valfix/train-000351.tar.gz 400 0.0 GB 14040053<00:07]
+# writing s2a-dim64-ttsr-valfix/train-000352.tar.gz 400 0.0 GB 140800
+# writing s2a-dim64-ttsr-valfix/train-000353.tar.gz 400 0.0 GB 14120053<00:07]
+# writing s2a-dim64-ttsr-valfix/train-000354.tar.gz 400 0.0 GB 14160053<00:06]
+# writing s2a-dim64-ttsr-valfix/train-000355.tar.gz 400 0.0 GB 14200053<00:06]
+# writing s2a-dim64-ttsr-valfix/train-000356.tar.gz 400 0.0 GB 142400
+# writing s2a-dim64-ttsr-valfix/train-000357.tar.gz 400 0.0 GB 14280053<00:06]
+# writing s2a-dim64-ttsr-valfix/train-000358.tar.gz 400 0.0 GB 14320054<00:06]
+# writing s2a-dim64-ttsr-valfix/train-000359.tar.gz 400 0.0 GB 14360054<00:06]
+# writing s2a-dim64-ttsr-valfix/train-000360.tar.gz 400 0.0 GB 14400054<00:05]
+# writing s2a-dim64-ttsr-valfix/train-000361.tar.gz 400 0.0 GB 144400
+# writing s2a-dim64-ttsr-valfix/train-000362.tar.gz 400 0.0 GB 14480054<00:05]
+# writing s2a-dim64-ttsr-valfix/train-000363.tar.gz 400 0.0 GB 14520054<00:05]
+# writing s2a-dim64-ttsr-valfix/train-000364.tar.gz 400 0.0 GB 14560054<00:05]
+# writing s2a-dim64-ttsr-valfix/train-000365.tar.gz 400 0.0 GB 146000
+# writing s2a-dim64-ttsr-valfix/train-000366.tar.gz 400 0.0 GB 14640055<00:05]
+# writing s2a-dim64-ttsr-valfix/train-000367.tar.gz 400 0.0 GB 14680055<00:04]
+# writing s2a-dim64-ttsr-valfix/train-000368.tar.gz 400 0.0 GB 14720055<00:04]
+# writing s2a-dim64-ttsr-valfix/train-000369.tar.gz 400 0.0 GB 147600
+# writing s2a-dim64-ttsr-valfix/train-000370.tar.gz 400 0.0 GB 14800055<00:04]
+# writing s2a-dim64-ttsr-valfix/train-000371.tar.gz 400 0.0 GB 14840055<00:04]
+# writing s2a-dim64-ttsr-valfix/train-000372.tar.gz 400 0.0 GB 14880055<00:04]
+# writing s2a-dim64-ttsr-valfix/train-000373.tar.gz 400 0.0 GB 149200
+# writing s2a-dim64-ttsr-valfix/train-000374.tar.gz 400 0.0 GB 14960056<00:03]
+# writing s2a-dim64-ttsr-valfix/train-000375.tar.gz 400 0.0 GB 15000056<00:03]
+# writing s2a-dim64-ttsr-valfix/train-000376.tar.gz 400 0.0 GB 15040056<00:03]
+# writing s2a-dim64-ttsr-valfix/train-000377.tar.gz 400 0.0 GB 150800
+# writing s2a-dim64-ttsr-valfix/train-000378.tar.gz 400 0.0 GB 15120056<00:03]
+# writing s2a-dim64-ttsr-valfix/train-000379.tar.gz 400 0.0 GB 15160056<00:03]
+# writing s2a-dim64-ttsr-valfix/train-000380.tar.gz 400 0.0 GB 15200056<00:02]
+# writing s2a-dim64-ttsr-valfix/train-000381.tar.gz 400 0.0 GB 152400
+# writing s2a-dim64-ttsr-valfix/train-000382.tar.gz 400 0.0 GB 15280057<00:02]
+# writing s2a-dim64-ttsr-valfix/train-000383.tar.gz 400 0.0 GB 15320057<00:02]
+# writing s2a-dim64-ttsr-valfix/train-000384.tar.gz 400 0.0 GB 15360057<00:02]
+# writing s2a-dim64-ttsr-valfix/train-000385.tar.gz 400 0.0 GB 154000
+# writing s2a-dim64-ttsr-valfix/train-000386.tar.gz 400 0.0 GB 15440057<00:02]
+# writing s2a-dim64-ttsr-valfix/train-000387.tar.gz 400 0.0 GB 15480057<00:01]
+# writing s2a-dim64-ttsr-valfix/train-000388.tar.gz 400 0.0 GB 155200
+# writing s2a-dim64-ttsr-valfix/train-000389.tar.gz 400 0.0 GB 15560057<00:01]
+# writing s2a-dim64-ttsr-valfix/train-000390.tar.gz 400 0.0 GB 15600058<00:01]
+# writing s2a-dim64-ttsr-valfix/train-000391.tar.gz 400 0.0 GB 15640058<00:01]
+# writing s2a-dim64-ttsr-valfix/train-000392.tar.gz 400 0.0 GB 156800
+# writing s2a-dim64-ttsr-valfix/train-000393.tar.gz 400 0.0 GB 15720058<00:01]
+# writing s2a-dim64-ttsr-valfix/train-000394.tar.gz 400 0.0 GB 15760058<00:00]
+# writing s2a-dim64-ttsr-valfix/train-000395.tar.gz 400 0.0 GB 15800058<00:00]
+# writing s2a-dim64-ttsr-valfix/train-000396.tar.gz 400 0.0 GB 158400
+# writing s2a-dim64-ttsr-valfix/train-000397.tar.gz 400 0.0 GB 15880058<00:00]
+# writing s2a-dim64-ttsr-valfix/train-000398.tar.gz 400 0.0 GB 15920059<00:00]
+# writing s2a-dim64-ttsr-valfix/train-000399.tar.gz 400 0.0 GB 15960059<00:00]
+ |████████████████████████████████████████| 100.00% [160350/160350 00:59<00:00]
+
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/3a. t2s transcripts preparation.html b/3a. t2s transcripts preparation.html new file mode 100644 index 0000000..9506689 --- /dev/null +++ b/3a. t2s transcripts preparation.html @@ -0,0 +1,769 @@ + + + + + + + + + +WhisperSpeech - T2S dataset preparation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

T2S dataset preparation

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
prepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)
+
+
Lightning automatically upgraded your loaded checkpoint from v1.5.4 to v2.1.0. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint ../../../.cache/torch/whisperx-vad-segmentation.bin`
+
+
+
Model was trained with pyannote.audio 0.0.1, yours is 2.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.
+Model was trained with torch 1.10.0+cu102, yours is 2.1.0+cu121. Bad things might happen unless you revert torch to 1.x.
+Benchmarking run of 1024 samples (64 batches)
+
+
+ + +
+
+ +
+ + 100.00% [64/64 00:40<00:00] +
+ +
+
+
+
prepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)
+
+
Benchmarking run of 1024 samples (64 batches)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+    - Avoid using `tokenizers` before the fork if possible
+    - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+
+
+ + +
+
+ +
+ + 100.00% [64/64 01:33<00:00] +
+ +
+
+
+
prepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', transcription_model='medium', n_samples=1024, batch_size=16)
+
+
Benchmarking run of 1024 samples (64 batches)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+    - Avoid using `tokenizers` before the fork if possible
+    - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+
+
+ + +
+
+ +
+ + 100.00% [64/64 02:06<00:00] +
+ +
+
+
+
prepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', transcription_model='medium', n_samples=1024, batch_size=1)
+
+
Benchmarking run of 1024 samples (1024 batches)
+huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...
+To disable this warning, you can either:
+    - Avoid using `tokenizers` before the fork if possible
+    - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)
+
+
+ + +
+
+ +
+ + 100.00% [1024/1024 10:01<00:00] +
+ +
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/3b. semantic token extraction.html b/3b. semantic token extraction.html new file mode 100644 index 0000000..903c398 --- /dev/null +++ b/3b. semantic token extraction.html @@ -0,0 +1,812 @@ + + + + + + + + + +WhisperSpeech - Semantic token extraction + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Semantic token extraction

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
vq_model = vq_stoks.RQBottleneckTransformer.load_model("vqmodel-medium-en+pl-512c-dim64.model").cuda()
+
+
+
vq_model.ensure_whisper('cuda')
+
+
+
vq_model.whmodel[0].encoder
+
+
AudioEncoder(
+  (conv1): Conv1d(80, 1024, kernel_size=(3,), stride=(1,), padding=(1,))
+  (conv2): Conv1d(1024, 1024, kernel_size=(3,), stride=(2,), padding=(1,))
+  (blocks): ModuleList(
+    (0-23): 24 x ResidualAttentionBlock(
+      (attn): MultiHeadAttention(
+        (query): Linear(in_features=1024, out_features=1024, bias=True)
+        (key): Linear(in_features=1024, out_features=1024, bias=False)
+        (value): Linear(in_features=1024, out_features=1024, bias=True)
+        (out): Linear(in_features=1024, out_features=1024, bias=True)
+      )
+      (attn_ln): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
+      (mlp): Sequential(
+        (0): Linear(in_features=1024, out_features=4096, bias=True)
+        (1): GELU(approximate='none')
+        (2): Linear(in_features=4096, out_features=1024, bias=True)
+      )
+      (mlp_ln): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
+    )
+  )
+  (ln_post): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
+)
+
+
+
+
+
+
Automatic pdb calling has been turned ON
+
+
+
+
prepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)
+
+ + +
+
+ +
+ + 100.00% [64/64 00:23<00:00] +
+ +
+
+
+
prepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)
+
+ + +
+
+ +
+ + 100.00% [64/64 00:21<00:00] +
+ +
+
+
+
prepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=32)
+
+ + +
+
+ +
+ + 100.00% [32/32 00:21<00:00] +
+ +
+
+
+
prepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=64)
+
+ + +
+
+ +
+ + 100.00% [16/16 00:20<00:00] +
+ +
+
+
+
prepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=64)
+
+ + +
+
+ +
+ + 100.00% [16/16 00:23<00:00] +
+ +
+
+
+
!ls -lh ../wolnelektury-wds2/wolnelektury-maxvad-stoks-000000.tar
+!tar -tf ../wolnelektury-wds2/wolnelektury-maxvad-stoks-000000.tar
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/3c. s2a acoustic tokens preparation.html b/3c. s2a acoustic tokens preparation.html new file mode 100644 index 0000000..83bb133 --- /dev/null +++ b/3c. s2a acoustic tokens preparation.html @@ -0,0 +1,759 @@ + + + + + + + + + +WhisperSpeech - S2A dataset preparation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

S2A dataset preparation

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
+
+
Automatic pdb calling has been turned ON
+
+
+
+
prepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=8)
+
+
/opt/conda/lib/python3.10/site-packages/torch/nn/utils/weight_norm.py:30: UserWarning: torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.
+  warnings.warn("torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.")
+
+
+
Benchmarking run of 1024 samples (128 batches)
+
+
+ + +
+
+ +
+ + 100.00% [128/128 00:22<00:00] +
+ +
+
+
+
prepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=4)
+
+
Benchmarking run of 1024 samples (256 batches)
+
+
+ + +
+
+ +
+ + 100.00% [256/256 00:23<00:00] +
+ +
+
+
+
prepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', batch_size=4)
+
+ + +
+
+ +
+ + 100.00% [2769/2769 04:09<00:00] +
+ +
+
+
+
prepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=2)
+
+
Benchmarking run of 1024 samples (512 batches)
+
+
+ + +
+
+ +
+ + 100.00% [512/512 00:31<00:00] +
+ +
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-11-output-6.png b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-11-output-6.png new file mode 100644 index 0000000..3ea3cb0 Binary files /dev/null and b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-11-output-6.png differ diff --git a/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-12-output-6.png b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-12-output-6.png new file mode 100644 index 0000000..01b6729 Binary files /dev/null and b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-12-output-6.png differ diff --git a/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-13-output-6.png b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-13-output-6.png new file mode 100644 index 0000000..b223f48 Binary files /dev/null and b/4B. Multi-language semantic to acoustic token modeling_files/figure-html/cell-13-output-6.png differ diff --git a/4b. multi-language semantic to acoustic token modeling.html b/4b. multi-language semantic to acoustic token modeling.html new file mode 100644 index 0000000..5463011 --- /dev/null +++ b/4b. multi-language semantic to acoustic token modeling.html @@ -0,0 +1,1238 @@ + + + + + + + + + +WhisperSpeech - Semantic to acoustic token modeling + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Semantic to acoustic token modeling

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
from encodec.model import EncodecModel
+import webdataset as wds
+from whisperspeech.train import *
+
+import pylab as plt
+from IPython.display import Audio, HTML, display
+
+
+

Load the dataset

+
+

source

+
+
+

load_dataset

+
+
 load_dataset (atoks_shard_spec:str, stoks_shard_dir:str, samples:int,
+               random_trunc_p:float=0, vq_codes:int=4096,
+               language:str='en', weight:float=1, validation:bool=False,
+               exclude_files:str=None, randomize_speakers:bool=False,
+               cwd:pathlib.Path=None)
+
+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
TypeDefaultDetails
atoks_shard_specstrwebdataset folder
stoks_shard_dirstrstoks webdataset base dir
samplesintsamples per epoch
random_trunc_pfloat0probability of truncating the input to less than 30 seconds
vq_codesint4096
languagestren
weightfloat1
validationboolFalse
exclude_filesstrNone
randomize_speakersboolFalse
cwdPathNone
+
+
+

Model

+
+
import pylab as plt
+import fastprogress
+import IPython
+import numpy as np
+
+class CMLMVisual:
+    """Visualize training progress"""
+    def __init__ (self, model, masterbar, total_steps):
+        self.model = model
+        self.masterbar = masterbar
+        self.total_steps = total_steps
+        self.epochs = total_steps // masterbar.main_bar.total
+        
+        gs = plt.GridSpec(3, 1, height_ratios=[2,2,1])
+        graph_fig = plt.figure(figsize=(10,6))
+        self.graph_fig = graph_fig
+        self.loss_p = graph_fig.add_subplot(gs[0])
+        self.acc_p = graph_fig.add_subplot(gs[1], sharex=self.loss_p)
+        self.acc_p.tick_params('x', labelbottom=False)
+        self.lr_p = graph_fig.add_subplot(gs[2], sharex=self.loss_p)
+        self.lr_p.tick_params('x', labelbottom=False)
+        self.graph_out = None
+        
+        self.its = []
+        self.train_losses = []
+        self.val_losses = []
+        self.lr_history = []
+        self.acc = np.nan
+        self.acc_history = []
+        self.pacc_history = []
+            
+    def show(self):
+        self.start_t = time.time()
+        self.masterbar.write(["samples", "train", "val", "time"], table=True)
+        self.graph_out = display(self.graph_fig, display_id=True)
+        self.acc_out = display(IPython.display.HTML(''), display_id=True)
+    
+    def hide(self):
+        if self.graph_out is not None:
+            self.graph_out.update(IPython.display.HTML(''))
+    
+    def plot(self):
+        loss_p, acc_p, lr_p = self.loss_p, self.acc_p, self.lr_p
+        loss_p.clear()
+        loss_p.plot(self.its, self.train_losses)
+        loss_p.plot(self.its, self.val_losses)
+        loss_p.set_xlim(0, self.total_steps)
+        loss_p.set_yscale('log')
+        acc_p.clear()
+        for k in self.acc_history[-1].keys():
+            acc_p.plot(self.its, [x[k] for x in self.acc_history], ':')
+        lr_p.clear()
+        lrs = np.array(self.lr_history)
+        lr_p.plot(self.its, lrs)
+        self.graph_out.update(self.graph_fig)
+    
+    def add_data(self, it, lr, train_loss, val_los):
+        self.its.append(it)
+        self.train_losses.append(train_loss)
+        self.val_losses.append(val_los)
+        self.lr_history.append(lr)
+        metrics = self.model.get_metrics()
+        self.acc_history.append(metrics)
+        html  = "<h5>Accuracies:</h5><table>"
+        html += "<thead>"+(''.join([f"<td>{k}<td>" for k,x in metrics.items()]))+"</thead>"
+        html += "<tr>"+(''.join([f"<td>{x*100:.1f}%<td>" for k,x in metrics.items()]))+"</tr>"
+        html += "</table>"
+        self.acc_out.update(IPython.display.HTML(html))
+        self.plot()
+
+    def add_table_row(self, it, avg_train_loss, val_loss):
+        elapsed_t = time.time() - self.start_t
+        self.masterbar.write([it, f"{avg_train_loss:.5f}", f"{val_loss:.5f}", fastprogress.core.format_time(elapsed_t)], table=True)
+    
+    def on_iter(self, bar, it, avg_train_loss, val_loss):
+        epoch = math.ceil(it / self.total_steps * self.epochs)
+        bar.comment = f"#{epoch}/{self.epochs} loss: {avg_train_loss:.3f} / {val_loss:.3f}"
+
+
+

source

+
+

DelSumEmbedding

+
+
 DelSumEmbedding (n_head=6, head_width=64, atoks_width=None, length=2250,
+                  codes=1024, quantizers=8, pos_embs=None)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

SADelARTransformer

+
+
 SADelARTransformer (depth=3, ctx_n=2250, stoks_len=750, stoks_codes=4097,
+                     stoks_width=None, spk_width=None, atoks_width=None,
+                     n_head=3, head_width=64, ffn_mult=4, quantizers=8,
+                     speaker_map={'1': 0}, tunables=Tunables(init_std=9,
+                     embeddings_std=0.2, embeddings_lr_scale=10,
+                     output_mult=5.6, query_mult=0.3,
+                     encoder_depth_ratio=0.25, linear_heads=False,
+                     rope=True, q0_loss_mult=1, causal_encoder=False,
+                     lr0=0.003, clip_gradient_norm=2, weight_decay=0.001,
+                     warmup_steps=2000, random=False,
+                     random_finetune=False, force_hidden_to_emb=False))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

Tunables

+
+
 Tunables (init_std:float=9, embeddings_std:float=0.2,
+           embeddings_lr_scale:float=10, output_mult:float=5.6,
+           query_mult:float=0.3, encoder_depth_ratio:float=0.25,
+           linear_heads:bool=False, rope:bool=True, q0_loss_mult:float=1,
+           causal_encoder:bool=False, lr0:float=0.003,
+           clip_gradient_norm:float=2, weight_decay:float=0.001,
+           warmup_steps:float=2000, random:bool=False,
+           random_finetune:bool=False, force_hidden_to_emb:bool=False)
+
+
+

source

+
+
+

rand

+
+
 rand (start, end)
+
+
+

source

+
+
+

DelSumHead

+
+
 DelSumHead (quantizers=8, n_head=6, head_width=64)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+
+
+

Training test

+
+
train_ds = load_dataset('../librilight/*atoks*.tar.gz', '../librilight-vq-en+pl/', 100000, vq_codes=513, exclude_files='../librilight/common-speakers-maxvad')
+val_ds = load_dataset('../librilight/common-speakers-maxvad.tar.gz', '../librilight-vq-en+pl/', 512, vq_codes=513, validation=True)
+
+
+
model = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',
+                   tunables=Tunables()).cuda()
+train(f"s2a-new", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,
+      table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)
+
+ +
+
+
Accuracies:
acc_0acc_1acc_2acc_3
29.6%23.6%21.2%19.2%
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 09:39<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvaltime
250243.958864.1707902:34
500163.719093.8194704:56
750083.538383.6292407:18
1000003.341183.4610009:39
+

+ +

+ + 100.00% [3125/3125 09:39<00:00 #100000/100000 loss: 3.341 / 3.461] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.
+  warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)
+
+
+
+
+

+
+
+
+
+
+
# encoder loss barely helps, probably because the RoPE cross-attention bias is already helping a lot
+model = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',
+                   tunables=Tunables(causal_encoder=True)).cuda()
+train(f"s2a-new", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,
+      table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)
+
+ +
+
+
Accuracies:
acc_0acc_1acc_2acc_3
29.6%23.8%21.2%19.2%
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 09:41<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvaltime
250244.163334.1606302:32
500163.984113.7963204:55
750083.752783.6235707:18
1000003.546393.4573409:41
+

+ +

+ + 100.00% [3125/3125 09:41<00:00 #100000/100000 loss: 3.546 / 3.457] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.
+  warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)
+
+
+
+
+

+
+
+
+
+
+
# we can prioritize the loss for the first quantizer
+# we'd have to compare generations to really know if it helps though
+model = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',
+                   tunables=Tunables(q0_loss_mult=5)).cuda()
+train(f"s2a-new", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,
+      table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)
+
+ +
+
+
Accuracies:
acc_0acc_1acc_2acc_3
30.5%23.0%19.8%17.7%
+
+
+ + +
+
+
+ +
+ + 100.00% [1/1 09:39<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvaltime
250243.599234.2483802:32
500163.417113.8803004:55
750083.193593.7088107:17
1000003.049863.5376209:39
+

+ +

+ + 100.00% [3125/3125 09:39<00:00 #100000/100000 loss: 3.050 / 3.538] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.
+  warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)
+
+
+
+
+

+
+
+
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-13-output-4.png b/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-13-output-4.png new file mode 100644 index 0000000..51943ae Binary files /dev/null and b/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-13-output-4.png differ diff --git a/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-15-output-5.png b/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-15-output-5.png new file mode 100644 index 0000000..98091d2 Binary files /dev/null and b/5B. Multi-lang text to semantic token modeling_files/figure-html/cell-15-output-5.png differ diff --git a/5b. multi-lang text to semantic token modeling.html b/5b. multi-lang text to semantic token modeling.html new file mode 100644 index 0000000..8e3f64f --- /dev/null +++ b/5b. multi-lang text to semantic token modeling.html @@ -0,0 +1,1198 @@ + + + + + + + + + +WhisperSpeech - Text to semantic tokens model + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Text to semantic tokens model

+
+ + + +
+ + + + +
+ + + +
+ + + +
+
from whisperspeech.wer_metrics import *
+from whisperspeech.train import *
+
+from fastprogress import master_bar
+import webdataset as wds
+
+
+

Dataset

+
+

source

+
+

load_dataset

+
+
 load_dataset (txt_shard_spec:str, stoks_shard_dir:str, samples:int,
+               txt_kind:str='small.en-txt', vq_codes:int=4096,
+               language:str='en', weight:float=1, validation:bool=False,
+               exclude_files:str=None, cwd:pathlib.Path=None)
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
TypeDefaultDetails
txt_shard_specstrtranscription webdataset shards
stoks_shard_dirstrstoks webdataset base dir
samplesintsamples per epoch
txt_kindstrsmall.en-txt
vq_codesint4096
languagestren
weightfloat1
validationboolFalse
exclude_filesstrNone
cwdPathNone
+
+
+
+
Automatic pdb calling has been turned ON
+
+
+
+
train_ds = load_dataset('../wolnelektury-wds2/wolnelektury-medium-txt-*.tar.gz', '../wolnelektury-vqv2/', 190000,
+                        txt_kind='medium-txt', vq_codes=513, language='pl',
+                        exclude_files='../wolnelektury-wds2/validation-samples')
+val_ds = load_dataset('../wolnelektury-wds2/validation-eqvad.tar.gz', '../wolnelektury-vqv2/', 520,
+                      txt_kind='medium-txt', vq_codes=513, language='pl', validation=True)
+
+
+
for x in progress_bar(train_ds, total=100): pass
+x
+
+ + +
+
+ +
+ + 100.00% [100/100 00:06<00:00] +
+ +
+
+
[tensor([[  0,  80, 114,  ...,   0,   0,   0],
+         [  0,  74,  97,  ...,   0,   0,   0],
+         [  0,  80, 114,  ...,   0,   0,   0],
+         ...,
+         [  0,  90,  32,  ...,   0,   0,   0],
+         [  0,  78, 105,  ...,   0,   0,   0],
+         [  0,  74,  97,  ...,   0,   0,   0]]),
+ tensor([[ 80, 114, 111,  ...,   0,   0,   0],
+         [ 74,  97,  99,  ...,   0,   0,   0],
+         [ 80, 114, 111,  ...,   0,   0,   0],
+         ...,
+         [ 90,  32, 100,  ...,   0,   0,   0],
+         [ 78, 105, 101,  ...,   0,   0,   0],
+         [ 74,  97,  32,  ...,   0,   0,   0]]),
+ array([10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+        10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+        10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+        10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10]),
+ array([16.87227866, 13.26666667, 10.44474394, 12.30366492, 17.85714286,
+        12.91291291, 17.57731959, 12.59044863, 13.08701657,  9.05923345,
+         6.64893617, 16.04938272, 13.57664234, 16.6958042 , 12.89986092,
+        12.30385164, 13.0044843 , 11.58280922,  6.55940594, 14.94444444,
+        14.01639344, 11.34085213, 14.24632353, 13.95348837, 13.08219178,
+        14.08382066, 17.42424242, 13.91006098, 12.85425101, 14.37296417,
+        13.3640553 , 12.09103841, 12.54098361, 11.59711075, 14.07380608,
+        13.40388007, 14.59537572, 11.70212766, 12.1559633 , 14.36781609,
+        13.86138614, 12.27272727, 14.36915888, 13.57388316, 12.84059946,
+        13.21478382, 11.01123596, 15.40041068, 14.14473684, 10.51401869,
+        11.55172414, 14.90990991, 16.0130719 , 12.80959752, 14.18511066,
+         6.04448743, 11.36      , 15.35087719, 15.41155867, 14.49880668,
+        12.47892074, 12.34375   , 14.04612159, 16.55629139]),
+ tensor([[512, 460,  66,  ..., 512, 512, 512],
+         [512, 336, 452,  ..., 116, 116, 116],
+         [512,  66, 309,  ..., 512, 512, 512],
+         ...,
+         [512, 336, 253,  ..., 512, 512, 512],
+         [512, 336, 141,  ..., 512, 512, 512],
+         [512, 336, 261,  ..., 512, 512, 512]]),
+ tensor([[460,  66, 337,  ..., 512, 512, 512],
+         [336, 452, 417,  ..., 116, 116, 460],
+         [ 66, 309,  58,  ..., 512, 512, 512],
+         ...,
+         [336, 253, 253,  ..., 512, 512, 512],
+         [336, 141, 248,  ..., 512, 512, 512],
+         [336, 261, 197,  ..., 512, 512, 512]])]
+
+
+
+
+
+

Modeling

+
+

source

+
+

Tunables

+
+
 Tunables (init_std:float=1, embeddings_std:float=0.01,
+           embeddings_lr_scale:float=5,
+           embedding_projector_lr_scale:float=2.5, output_mult:float=0.35,
+           query_mult:float=1, encoder_depth_ratio:float=0.25,
+           causal_encoder:bool=True, eot_dropout_p:float=0.5,
+           cps_input:bool=True, cps_bins:int=32, lr0:float=0.0015,
+           clip_gradient_norm:float=0.2, weight_decay:float=0.1,
+           warmup_steps:float=4000, random:bool=False)
+
+
+

source

+
+
+

rand

+
+
 rand (start, end)
+
+
+

source

+
+
+

T2SEmbedding

+
+
 T2SEmbedding (length=1500, codes=1024, width=384, pos_embs=None,
+               stoks_width=384)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

Encoder

+
+
 Encoder (depth=6, width=384, n_head=6, length=1500, codes=1024,
+          emb_width=384, ffn_mult=4, pos_embs=None,
+          tunables=Tunables(init_std=1, embeddings_std=0.01,
+          embeddings_lr_scale=5, embedding_projector_lr_scale=2.5,
+          output_mult=0.35, query_mult=1, encoder_depth_ratio=0.25,
+          causal_encoder=True, eot_dropout_p=0.5, cps_input=True,
+          cps_bins=32, lr0=0.0015, clip_gradient_norm=0.2,
+          weight_decay=0.1, warmup_steps=4000, random=False))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

TSARTransformer

+
+
 TSARTransformer (depth=6, n_head=6, head_width=64, ffn_mult=4,
+                  ttoks_len=200, ttoks_codes=256, ttoks_width=None,
+                  stoks_len=1500, stoks_codes=1024, stoks_width=None,
+                  tunables=Tunables(init_std=1, embeddings_std=0.01,
+                  embeddings_lr_scale=5, embedding_projector_lr_scale=2.5,
+                  output_mult=0.35, query_mult=1,
+                  encoder_depth_ratio=0.25, causal_encoder=True,
+                  eot_dropout_p=0.5, cps_input=True, cps_bins=32,
+                  lr0=0.0015, clip_gradient_norm=0.2, weight_decay=0.1,
+                  warmup_steps=4000, random=False))
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

make_model

+
+
 make_model (size:str, frozen_embeddings_model:str=None,
+             tunables:__main__.Tunables=Tunables(init_std=1,
+             embeddings_std=0.01, embeddings_lr_scale=5,
+             embedding_projector_lr_scale=2.5, output_mult=0.35,
+             query_mult=1, encoder_depth_ratio=0.25, causal_encoder=True,
+             eot_dropout_p=0.5, cps_input=True, cps_bins=32, lr0=0.0015,
+             clip_gradient_norm=0.2, weight_decay=0.1, warmup_steps=4000,
+             random=False), dataset:torch.utils.data.dataset.Dataset=None)
+
+
+
# baseline
+model = make_model('micro', dataset=train_ds, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',
+                   tunables=Tunables()).cuda()
+train("tsar-wx", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=4,
+      warmup_steps=model.tunables.warmup_steps, weight_decay=model.tunables.weight_decay, clip_gradient_norm=model.tunables.clip_gradient_norm,
+      table_row_every_iters=100000, run_valid_every_iters=10000)
+
+ +
+
+ + +
+
+
+ +
+ + 100.00% [4/4 07:59<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvaltime
1000002.620642.4839301:06
2000001.793761.7724802:11
3000001.696661.6620203:11
4000001.737551.6074004:17
5000001.751081.5682705:17
6000001.598731.5339406:23
7000001.502891.4951507:23
7599361.522611.4747308:00
+

+ +

+ + 100.00% [5937/5937 02:00<00:00 #189984/189984 loss: 1.523 / 1.475] +
+ +
+
+
+
+
+

+
+
+
+
+
+
model.save_model('t2s-micro.model')
+
+
+
# no encoder LM loss, trains visibly slower
+model = make_model('micro', dataset=train_ds, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',
+                   tunables=Tunables(causal_encoder=False)).cuda()
+train("tsar-wx", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=4,
+      warmup_steps=1500, weight_decay=model.tunables.weight_decay, clip_gradient_norm=model.tunables.clip_gradient_norm,
+      table_row_every_iters=100000, run_valid_every_iters=10000)
+
+ +
+
+ + +
+
+
+ +
+ + 100.00% [4/4 07:57<00:00] +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
samplestrainvaltime
1000002.444522.3818101:04
2000002.332792.1901002:11
3000001.830191.8291803:12
4000001.749881.7307404:16
5000001.586861.6756005:15
6000001.545441.6292206:21
7000001.683791.5951307:21
7599361.619151.5761907:57
+

+ +

+ + 100.00% [5937/5937 01:59<00:00 #189984/189984 loss: 1.619 / 1.576] +
+ +
+
+
+
/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.
+  warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)
+
+
+
+
+

+
+
+
+
+ + +
+
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/6. Quality-boosting vocoder.html b/6. Quality-boosting vocoder.html new file mode 100644 index 0000000..8f75b72 --- /dev/null +++ b/6. Quality-boosting vocoder.html @@ -0,0 +1,588 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

Vocoder

+
+
 Vocoder (repo_id='charactr/vocos-encodec-24khz', device=None)
+
+

Initialize self. See help(type(self)) for accurate signature.

+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/7. Pipeline.html b/7. Pipeline.html new file mode 100644 index 0000000..058f2a7 --- /dev/null +++ b/7. Pipeline.html @@ -0,0 +1,589 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

Pipeline

+
+
 Pipeline (t2s_ref=None, s2a_ref=None, optimize=True, torch_compile=False,
+           device=None)
+
+

Initialize self. See help(type(self)) for accurate signature.

+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/A. Neural modules.html b/A. Neural modules.html new file mode 100644 index 0000000..5103fc6 --- /dev/null +++ b/A. Neural modules.html @@ -0,0 +1,895 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

init_transformer

+
+
 init_transformer (m)
+
+
+

source

+
+
+

QueryHead

+
+
 QueryHead (in_features:int, out_features:int, bias:bool=True,
+            device=None, dtype=None)
+
+

Applies a linear transformation to the incoming data: :math:y = xA^T + b.

+

This module supports :ref:TensorFloat32<tf32_on_ampere>.

+

On certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.

+

Args: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True

+

Shape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \text{in\_features}. - Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \text{out\_features}.

+

Attributes: weight: the learnable weights of the module of shape :math:(\text{out\_features}, \text{in\_features}). The values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}), where :math:k = \frac{1}{\text{in\_features}} bias: the learnable bias of the module of shape :math:(\text{out\_features}). If :attr:bias is True, the values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}) where :math:k = \frac{1}{\text{in\_features}}

+

Examples::

+
>>> m = nn.Linear(20, 30)
+>>> input = torch.randn(128, 20)
+>>> output = m(input)
+>>> print(output.size())
+torch.Size([128, 30])
+
+

source

+
+
+

LinearHead

+
+
 LinearHead (in_features:int, out_features:int, bias:bool=True,
+             device=None, dtype=None)
+
+

Applies a linear transformation to the incoming data: :math:y = xA^T + b.

+

This module supports :ref:TensorFloat32<tf32_on_ampere>.

+

On certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.

+

Args: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True

+

Shape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \text{in\_features}. - Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \text{out\_features}.

+

Attributes: weight: the learnable weights of the module of shape :math:(\text{out\_features}, \text{in\_features}). The values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}), where :math:k = \frac{1}{\text{in\_features}} bias: the learnable bias of the module of shape :math:(\text{out\_features}). If :attr:bias is True, the values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}) where :math:k = \frac{1}{\text{in\_features}}

+

Examples::

+
>>> m = nn.Linear(20, 30)
+>>> input = torch.randn(128, 20)
+>>> output = m(input)
+>>> print(output.size())
+torch.Size([128, 30])
+
+

source

+
+
+

LayerNorm

+
+
 LayerNorm (normalized_shape:Union[int,List[int],torch.Size],
+            eps:float=1e-05, elementwise_affine:bool=True, bias:bool=True,
+            device=None, dtype=None)
+
+

Applies Layer Normalization over a mini-batch of inputs.

+

This layer implements the operation as described in the paper Layer Normalization <https://arxiv.org/abs/1607.06450>__

+

.. math:: y = * +

+

The mean and standard-deviation are calculated over the last D dimensions, where D is the dimension of :attr:normalized_shape. For example, if :attr:normalized_shape is (3, 5) (a 2-dimensional shape), the mean and standard-deviation are computed over the last 2 dimensions of the input (i.e. input.mean((-2, -1))). :math:\gamma and :math:\beta are learnable affine transform parameters of :attr:normalized_shape if :attr:elementwise_affine is True. The standard-deviation is calculated via the biased estimator, equivalent to torch.var(input, unbiased=False).

+

.. note:: Unlike Batch Normalization and Instance Normalization, which applies scalar scale and bias for each entire channel/plane with the :attr:affine option, Layer Normalization applies per-element scale and bias with :attr:elementwise_affine.

+

This layer uses statistics computed from input data in both training and evaluation modes.

+

Args: normalized_shape (int or list or torch.Size): input shape from an expected input of size

+
    .. math::
+        [* \times \text{normalized\_shape}[0] \times \text{normalized\_shape}[1]
+            \times \ldots \times \text{normalized\_shape}[-1]]
+
+    If a single integer is used, it is treated as a singleton list, and this module will
+    normalize over the last dimension which is expected to be of that specific size.
+eps: a value added to the denominator for numerical stability. Default: 1e-5
+elementwise_affine: a boolean value that when set to ``True``, this module
+    has learnable per-element affine parameters initialized to ones (for weights)
+    and zeros (for biases). Default: ``True``.
+bias: If set to ``False``, the layer will not learn an additive bias (only relevant if
+    :attr:`elementwise_affine` is ``True``). Default: ``True``.
+

Attributes: weight: the learnable weights of the module of shape :math:\text{normalized\_shape} when :attr:elementwise_affine is set to True. The values are initialized to 1. bias: the learnable bias of the module of shape :math:\text{normalized\_shape} when :attr:elementwise_affine is set to True. The values are initialized to 0.

+

Shape: - Input: :math:(N, *) - Output: :math:(N, *) (same shape as input)

+

Examples::

+
>>> # NLP Example
+>>> batch, sentence_length, embedding_dim = 20, 5, 10
+>>> embedding = torch.randn(batch, sentence_length, embedding_dim)
+>>> layer_norm = nn.LayerNorm(embedding_dim)
+>>> # Activate module
+>>> layer_norm(embedding)
+>>>
+>>> # Image Example
+>>> N, C, H, W = 20, 5, 10, 10
+>>> input = torch.randn(N, C, H, W)
+>>> # Normalize over the last three dimensions (i.e. the channel and spatial dimensions)
+>>> # as shown in the image below
+>>> layer_norm = nn.LayerNorm([C, H, W])
+>>> output = layer_norm(input)
+

.. image:: ../_static/img/nn/layer_norm.jpg :scale: 50 %

+
+

source

+
+
+

sinusoids

+
+
 sinusoids (length, channels, max_timescale=10000)
+
+

Returns sinusoids for positional embedding

+
+

source

+
+
+

MultiHeadAttention

+
+
 MultiHeadAttention (n_state:int, n_head:int, qk_scale:float=1,
+                     rope:bool=False, cross=False)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

ResidualAttentionBlock

+
+
 ResidualAttentionBlock (n_state:int, n_head:int,
+                         cross_attention:bool=False, rope:bool=False,
+                         qk_scale:float=1, ffn_mult:int=4)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

BaseDecoder

+
+
 BaseDecoder (depth=6, n_head=6, width=384, qk_scale=1, ffn_mult=4,
+              length=2250, rope=False)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

FlexEmbeddings

+
+
 FlexEmbeddings (codes, width, special_codes=None, frozen_width=None,
+                 special_embedding=None, unembed=True)
+
+

Base class for all neural network modules.

+

Your models should also subclass this class.

+

Modules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::

+
import torch.nn as nn
+import torch.nn.functional as F
+
+class Model(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.conv1 = nn.Conv2d(1, 20, 5)
+        self.conv2 = nn.Conv2d(20, 20, 5)
+
+    def forward(self, x):
+        x = F.relu(self.conv1(x))
+        return F.relu(self.conv2(x))
+

Submodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.

+

.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.

+

:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool

+
+

source

+
+
+

EmbeddingProjector

+
+
 EmbeddingProjector (in_features:int, out_features:int, bias:bool=True,
+                     device=None, dtype=None)
+
+

Applies a linear transformation to the incoming data: :math:y = xA^T + b.

+

This module supports :ref:TensorFloat32<tf32_on_ampere>.

+

On certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.

+

Args: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True

+

Shape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \text{in\_features}. - Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \text{out\_features}.

+

Attributes: weight: the learnable weights of the module of shape :math:(\text{out\_features}, \text{in\_features}). The values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}), where :math:k = \frac{1}{\text{in\_features}} bias: the learnable bias of the module of shape :math:(\text{out\_features}). If :attr:bias is True, the values are initialized from :math:\mathcal{U}(-\sqrt{k}, \sqrt{k}) where :math:k = \frac{1}{\text{in\_features}}

+

Examples::

+
>>> m = nn.Linear(20, 30)
+>>> input = torch.randn(128, 20)
+>>> output = m(input)
+>>> print(output.size())
+torch.Size([128, 30])
+
+
femb = FlexEmbeddings(2, 3, 1).half()
+with torch.no_grad():
+    femb.main.weight[:] = 0
+    femb.main.weight[:,:2] = torch.eye(2)
+    femb.special.weight[:] = torch.tensor([0,0,1])
+femb.main.weight, femb.special.weight
+
+
(Parameter containing:
+ tensor([[1., 0., 0.],
+         [0., 1., 0.]], dtype=torch.float16, requires_grad=True),
+ Parameter containing:
+ tensor([[0., 0., 1.]], dtype=torch.float16, requires_grad=True))
+
+
+
+
embs = femb(torch.tensor([[0,2,1,0]]))
+embs
+
+
tensor([[[1., 0., 0.],
+         [0., 0., 1.],
+         [0., 1., 0.],
+         [1., 0., 0.]]], dtype=torch.float16, grad_fn=<IndexPutBackward0>)
+
+
+
+
embs += femb(torch.tensor([[0]]))
+
+
+
femb.unembed(embs.float())
+
+
tensor([[[2., 0., 0.],
+         [1., 0., 1.],
+         [1., 1., 0.],
+         [2., 0., 0.]]], grad_fn=<CatBackward0>)
+
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/B1. Training.html b/B1. Training.html new file mode 100644 index 0000000..0d53d5d --- /dev/null +++ b/B1. Training.html @@ -0,0 +1,613 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

SimpleVisual

+
+
 SimpleVisual (model, masterbar, total_steps)
+
+

Initialize self. See help(type(self)) for accurate signature.

+
+

source

+
+
+

validate

+
+
 validate (model, val, half=True, bs=16, drop_last=False, dl_workers=8,
+           device='cuda')
+
+
+

source

+
+
+

train

+
+
 train (checkpoint_path, model, train, val, half=True, bs=16, lr=0.0001,
+        drop_last=False, weight_decay=0.1, warmup_steps=10000, epochs=10,
+        clip_gradient_norm=None, dl_workers=8, visual_class=<class
+        '__main__.SimpleVisual'>, profiler=None,
+        run_valid_every_iters=8000, table_row_every_iters=80000,
+        chkpt_every_iters=None, device='cuda', trainable_params=None,
+        callback=None, lr_schedule='wsd')
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/B2. Training (Lightning).html b/B2. Training (Lightning).html new file mode 100644 index 0000000..291a39b --- /dev/null +++ b/B2. Training (Lightning).html @@ -0,0 +1,638 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+
def test_fun(a:str=None, to:int = 2, toggle:bool=True):
+    assert(a is not None)
+    print(a, to, toggle)
+parse_and_call("test", test_fun, ["--to", "4"], dict(a=[]), log_to_wandb=False)
+
+
[] 4 False
+
+
+
+
from fastcore.script import anno_parser
+def test_fun(a:str=None, to:int = 2, toggle:bool=True):
+    assert(a is not None)
+    print(a, to, toggle)
+test_fun("a")
+anno_parser(test_fun).parse_args([])
+
+
a 2 True
+
+
+
Namespace(a=None, to=2, toggle=False, pdb=False, xtra=None)
+
+
+
+
def test_fun2(a:str, to:int = 2):
+    assert(a is not None)
+    print(a, to)
+
+parse_and_call("test", test_fun2, ["qwe"], log_to_wandb=False)
+
+
qwe 2
+
+
+ + + +
+ +
+ + + + + \ No newline at end of file diff --git a/C. Word error rate metrics.html b/C. Word error rate metrics.html new file mode 100644 index 0000000..dbcd4fe --- /dev/null +++ b/C. Word error rate metrics.html @@ -0,0 +1,655 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+
+
+
The autoreload extension is already loaded. To reload it, use:
+  %reload_ext autoreload
+
+
+
+
default_transform(["Footnote, Somber Tashan, May 12, 1856", "FOOTNOTE SUMNER TO SHANNON MAY TWELFTH EIGHTEEN FIFTY SIX"])
+
+
[['footnote', 'somber', 'tashan', 'may', '12', '1856'],
+ ['footnote', 'sumner', 'to', 'shannon', 'may', '12th', '1856']]
+
+
+
+

source

+
+

librispeech_data

+
+
 librispeech_data (datadir, sample_rate=16000)
+
+
+

source

+
+
+

DfBuilder

+
+
 DfBuilder ()
+
+

Initialize self. See help(type(self)) for accurate signature.

+
+

source

+
+
+

WERStats

+
+
 WERStats (transform=<jiwer.transforms.Compose object at 0x7f9cae35aa90>)
+
+

Initialize self. See help(type(self)) for accurate signature.

+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/C2. Testing.html b/C2. Testing.html new file mode 100644 index 0000000..eb98f0a --- /dev/null +++ b/C2. Testing.html @@ -0,0 +1,587 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

test_model

+
+
 test_model (model, ds, bs=1)
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/D. Common dataset utilities.html b/D. Common dataset utilities.html new file mode 100644 index 0000000..0e989ef --- /dev/null +++ b/D. Common dataset utilities.html @@ -0,0 +1,866 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

shard_glob

+
+
 shard_glob (input)
+
+
+
shard_glob('../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-*.tar.gz')
+
+
['../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000000.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000006.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000004.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000001.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000003.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000002.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000005.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000007.tar.gz']
+
+
+
+
# 
+shard_glob(Path('../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-*.tar.gz'))
+
+
['../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000000.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000006.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000004.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000001.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000003.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000002.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000005.tar.gz',
+ '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000007.tar.gz']
+
+
+
+
# we can also specify the range and generate shard URLs
+shard_glob(Path('https://huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-{000000..000007}.tar.gz'))
+
+
['https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000000.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000001.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000002.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000003.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000004.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000005.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000006.tar.gz',
+ 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000007.tar.gz']
+
+
+
+

source

+
+
+

join_datasets

+
+
 join_datasets (datasets)
+
+

An iterable Dataset.

+

All datasets that represent an iterable of data samples should subclass it. Such form of datasets is particularly useful when data come from a stream.

+

All subclasses should overwrite :meth:__iter__, which would return an iterator of samples in this dataset.

+

When a subclass is used with :class:~torch.utils.data.DataLoader, each item in the dataset will be yielded from the :class:~torch.utils.data.DataLoader iterator. When :attr:num_workers > 0, each worker process will have a different copy of the dataset object, so it is often desired to configure each copy independently to avoid having duplicate data returned from the workers. :func:~torch.utils.data.get_worker_info, when called in a worker process, returns information about the worker. It can be used in either the dataset’s :meth:__iter__ method or the :class:~torch.utils.data.DataLoader ’s :attr:worker_init_fn option to modify each copy’s behavior.

+

Example 1: splitting workload across all workers in :meth:__iter__::

+
>>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DATALOADER)
+>>> # xdoctest: +SKIP("Fails on MacOS12")
+>>> class MyIterableDataset(torch.utils.data.IterableDataset):
+...     def __init__(self, start, end):
+...         super(MyIterableDataset).__init__()
+...         assert end > start, "this example code only works with end >= start"
+...         self.start = start
+...         self.end = end
+...
+...     def __iter__(self):
+...         worker_info = torch.utils.data.get_worker_info()
+...         if worker_info is None:  # single-process data loading, return the full iterator
+...             iter_start = self.start
+...             iter_end = self.end
+...         else:  # in a worker process
+...             # split workload
+...             per_worker = int(math.ceil((self.end - self.start) / float(worker_info.num_workers)))
+...             worker_id = worker_info.id
+...             iter_start = self.start + worker_id * per_worker
+...             iter_end = min(iter_start + per_worker, self.end)
+...         return iter(range(iter_start, iter_end))
+...
+>>> # should give same set of data as range(3, 7), i.e., [3, 4, 5, 6].
+>>> ds = MyIterableDataset(start=3, end=7)
+
+>>> # Single-process loading
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=0)))
+[tensor([3]), tensor([4]), tensor([5]), tensor([6])]
+
+>>> # xdoctest: +REQUIRES(POSIX)
+>>> # Mult-process loading with two worker processes
+>>> # Worker 0 fetched [3, 4].  Worker 1 fetched [5, 6].
+>>> # xdoctest: +IGNORE_WANT("non deterministic")
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2)))
+[tensor([3]), tensor([5]), tensor([4]), tensor([6])]
+
+>>> # With even more workers
+>>> # xdoctest: +IGNORE_WANT("non deterministic")
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=12)))
+[tensor([3]), tensor([5]), tensor([4]), tensor([6])]
+

Example 2: splitting workload across all workers using :attr:worker_init_fn::

+
>>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DATALOADER)
+>>> class MyIterableDataset(torch.utils.data.IterableDataset):
+...     def __init__(self, start, end):
+...         super(MyIterableDataset).__init__()
+...         assert end > start, "this example code only works with end >= start"
+...         self.start = start
+...         self.end = end
+...
+...     def __iter__(self):
+...         return iter(range(self.start, self.end))
+...
+>>> # should give same set of data as range(3, 7), i.e., [3, 4, 5, 6].
+>>> ds = MyIterableDataset(start=3, end=7)
+
+>>> # Single-process loading
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=0)))
+[3, 4, 5, 6]
+>>>
+>>> # Directly doing multi-process loading yields duplicate data
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2)))
+[3, 3, 4, 4, 5, 5, 6, 6]
+
+>>> # Define a `worker_init_fn` that configures each dataset copy differently
+>>> def worker_init_fn(worker_id):
+...     worker_info = torch.utils.data.get_worker_info()
+...     dataset = worker_info.dataset  # the dataset copy in this worker process
+...     overall_start = dataset.start
+...     overall_end = dataset.end
+...     # configure the dataset to only process the split workload
+...     per_worker = int(math.ceil((overall_end - overall_start) / float(worker_info.num_workers)))
+...     worker_id = worker_info.id
+...     dataset.start = overall_start + worker_id * per_worker
+...     dataset.end = min(dataset.start + per_worker, overall_end)
+...
+
+>>> # Mult-process loading with the custom `worker_init_fn`
+>>> # Worker 0 fetched [3, 4].  Worker 1 fetched [5, 6].
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2, worker_init_fn=worker_init_fn)))
+[3, 5, 4, 6]
+
+>>> # With even more workers
+>>> print(list(torch.utils.data.DataLoader(ds, num_workers=12, worker_init_fn=worker_init_fn)))
+[3, 4, 5, 6]
+
+
# validate that we don't reset the datasets on each `iter`
+# this is important with webdatasets since sample shuffling is very bad initially, unless num_workers << num_shards
+from itertools import islice
+ds = join_datasets(["ABCDEFG"])
+for x in islice(ds, 3):
+    print(x)
+for x in islice(ds, 5):
+    print(x)
+
+
A
+B
+C
+D
+E
+F
+G
+
+
+
+
# will stop as soon as it exhausts one iterator
+for x in join_datasets(['ABCDEFG', 'abcdefg', range(20)]):
+    print(x)
+
+
0
+a
+1
+2
+3
+A
+4
+5
+b
+B
+c
+C
+D
+E
+6
+d
+e
+7
+F
+f
+g
+8
+G
+9
+
+
+
+

source

+
+
+

resampler

+
+
 resampler (newsr=24000, key='samples_24k')
+
+
+

source

+
+
+

derived_name

+
+
 derived_name (input, kind, base='audio', suffix='.gz', dir=None)
+
+
+

source

+
+
+

derived_dataset

+
+
 derived_dataset (kind, base='audio', suffix='.gz', decoders=[], dir=None)
+
+
+

source

+
+
+

merge_in

+
+
 merge_in (dataset_fun)
+
+

Merge a dataset into the current one returning samples with the union of keys. Pass in a function that takes a URL of a sample and returns a dataset for it (called everytime the URL changes).

+

It requires (and validates) that both datasets have the same ordering of keys so you have to use it before any sample shuffling. Shard shuffling is ok.

+
+

source

+
+
+

AtomicTarWriter

+
+
 AtomicTarWriter (name, throwaway=False)
+
+
+

source

+
+
+

readlines

+
+
 readlines (fname)
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/D. Common inference utilities.html b/D. Common inference utilities.html new file mode 100644 index 0000000..aecb0f1 --- /dev/null +++ b/D. Common inference utilities.html @@ -0,0 +1,587 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ + + + + +
+

source

+
+

get_compute_device

+
+
 get_compute_device ()
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/b. languages.html b/b. languages.html new file mode 100644 index 0000000..b430e25 --- /dev/null +++ b/b. languages.html @@ -0,0 +1,604 @@ + + + + + + + + + +WhisperSpeech - Language codes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Language codes

+
+ + + +
+ + + + +
+ + + +
+ + + +

This language list is coming straight from openai-whisper. The upstream file is here: https://github.com/openai/whisper/blob/main/whisper/tokenizer.py but we are freezing this to the openai-whisper==20230918 version right now.

+
+

source

+
+

to_id

+
+
 to_id (lang)
+
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/c. benchmark.html b/c. benchmark.html new file mode 100644 index 0000000..a4696b2 --- /dev/null +++ b/c. benchmark.html @@ -0,0 +1,587 @@ + + + + + + + + + +WhisperSpeech - Benchmarking + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

Benchmarking

+
+ + + +
+ + + + +
+ + + +
+ + + + + + +
+ +
+ + + + + \ No newline at end of file diff --git a/dataset preparation.html b/dataset preparation.html new file mode 100644 index 0000000..402deb9 --- /dev/null +++ b/dataset preparation.html @@ -0,0 +1,884 @@ + + + + + + + + + +WhisperSpeech - I can has speech? What data WhisperSpeech needs? + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

I can has speech? What data WhisperSpeech needs?

+
+ + + +
+ + + + +
+ + + +
+ + + +

WhisperSpeech is trained on heavily preprocessed speech data generated from several models:

+ +
+
+

+
WhisperSpeech TTS overview diagram
+
+
+
+

Who is who? A high-level overview

+

To get these 3 data representations we have to run the audio data through several models. The first two steps are always the same, the rest depend on the model we want to run.

+
    +
  1. We start by downloading the speech audio files into a sharded webdataset (e.g. A3. Download Project Guttenberg audiobooks).
    +We released webdatasetified versions of two important public domain speech datasets – LibriLight and Project Gutenberg Audiobooks.

  2. +
  3. All subsequent steps rely on voice activity detection (VAD) and diarization so we always generate segment lists and extract speaker embeddings for all audio files (see 1B. Voice activity detection and 2A. Speaker Embeddings for source code).
    +The results of this step were also released on Hugging Face – LibriLight and Project Gutenberg Audiobooks.

  4. +
+

The next steps depend on which model we want to train or fine-tune.

+
    +
  1. To re-train the quantized Whisper model we need to transcribe the audio with base.en (2A. Whisper quantization dataset preparation). A model pretrained on 60k hours of LibriLight is available from Hugging Face whisper-vq-stoks-v2.model.
  2. +
  3. To train the text to semantic token model we need to transcribe the audio with Whisper small.en and extract the semantic tokens (5A. T2S dataset preparation).
  4. +
  5. To train the semantic to acoustic model we need to extract the semantic tokens and compress the audio with Encodec for the semantic to acoustic model (4A. S2A dataset preparation).
  6. +
+

These three steps are all independent since they require different chunking of speech data. For quantizing Whisper and S2A training we greedily merge the VAD segments from the same speaker into (at most) 30 second chunks to improve training performance (more uniform chunks mean less computation time is spent on padding). For T2S we randomly truncate when merging the VAD segments so the model also learns how to work with shorter texts. The code to perform this is in 1C. VAD merging.

+
+
+

TL;DR example – give me the codes!

+

In this example we will convert a single split from the Multilingual Libri Speech dataset.

+
+

Prepare the webdataset shards

+

The first, most time-consuming, step is to convert the data from its original form into the webdataset format. If you want to skip this section and still follow along, the results can be downloaded from Hugging Face at datasets/collabora/multilingual-librispeech-webdataset.

+

First we need tarp which is a tool that helps create and manipulate the webdataset tar files more effectively. You can check out more about it in the official tarp README

+
go install -v github.com/collabora/tarp/tarp@latest
+

Afterwards, we download and unpack the original dataset files:

+
aria2c -x10 https://dl.fbaipublicfiles.com/mls/mls_french_opus.tar.gz
+tar -xf mls_french_opus.tar.gz
+

Next, we’ll need to convert each line in the transcripts.txt file:

+
10065_10039_000000      ses vêtements devinrent tout brillants de lumière et blancs comme la neige en sorte qu'il n'y a point de foulon sur la terre qui puisse en faire d'aussi blancs
+

into a tarp script:

+
train/10065_10039_000000.opus file:mls_french_opus/train/audio/10065/10039/10065_10039_000000.opus
+train/10065_10039_000000.txt text:ses vêtements devinrent tout brillants de lumière et blancs comme la neige en sorte qu'il n'y a point de foulon sur la terre qui puisse en faire d'aussi blancs
+

We can achieve this using a short Python script (saved as make-script.py):

+
import sys
+
+fname = sys.argv[1]
+dir, split, _ = fname.rsplit("/", 2)
+
+for ln in open(fname):
+    id, txt = ln.split("\t")
+    a,b,c = id.split("_")
+    txt = txt.replace("\n", "")
+    print(f"""{split}/{id}.opus file:{dir}/{split}/audio/{a}/{b}/{id}.opus
+{split}/{id}.txt text:{txt}""")
+

Once we have this, we can run the conversion process. The python script outputs data sample descriptions which are fed to tarp create that archives them into a tar stream (a bit similar to tar -T -). The tarp split will then cut the incoming stream into 2GB shards and save them to separate files, making sure to split on sample boundaries.

+

The 2GB size was chosen as a good compromise between the shard count and shard transcription time for mp3/opus files with multilingual speech. For LibriLight (English compressed with FLAC) the magic number was 5GB because FLAC compresses less and we can also use a smaller model for transcribing English speech.

+
python3 make-script.py  mls_french_opus/train/transcripts.txt \
+  | /root/go/bin/tarp create -o - - \
+  | /root/go/bin/tarp split -s 2e9 -o 'mls_french_train-audio-%06d.tar' -
+

We’ll have to repeat the same command two times replacing train with test and dev and afterwards we can upload everything to Hugging Face:

+
huggingface-cli login
+huggingface-cli upload --repo-type dataset collabora/multilingual-librispeech-webdataset .
+
+
+

Process the shards on a single GPU machine

+

We do the sharding mainly to be able to effectively process data on many GPUs but for the sake of simplicity we will use a single GPU here. The process stays the same, but different tools would be used to schedule the jobs. For reference, below the commands, we have specified their approximate runtimes on a RTX 4090 for the French subset of MLS.

+

Perform voice activity detection:

+
parallel --eta -j3 python -m whisperspeech.vad {} ::: ./*.tar
+# 50min
+

Extract speaker embeddings for each fragment:

+
parallel --eta -j2 python -m whisperspeech.extract_spk_emb --batch_size 16 {} ::: ./*.tar
+# 1h 10min
+

We perform VAD segment merging (we do it as a separate step here to remove all randomness and get reproducibility for later steps):

+
parallel --eta -j16 python -m whisperspeech.vad_merge --eqvad {} ::: *.tar
+parallel --eta -j16 python -m whisperspeech.vad_merge {} ::: *.tar
+

With that covered we can start the heavy lifting with the transcripts:

+
parallel --eta -j1 python -m whisperspeech.prepare_t2s_txts --transcription_model medium --language fr --batch_size 32 {} ::: *.tar
+# 6h 48min
+

Afterwards comes Encodec compression:

+
parallel --eta -j2 python -m whisperspeech.prepare_s2a_atoks --batch_size 4 {} ::: *.tar
+# 2h
+

Now we can extract the semantic tokens for both the T2S (eqvad) and S2A (maxvad) training:

+
parallel --eta -j1 python -m whisperspeech.extract_stoks --batch_size 16 --vq_model ../nbs/vqmodel-medium-en+pl-512c-dim64.model {} ::: *.tar
+parallel --eta -j1 python -m whisperspeech.extract_stoks --kind eqvad --batch_size 16 --vq_model ../nbs/vqmodel-medium-en+pl-512c-dim64.model {} ::: *.tar
+# 3h 45min
+
+
+

Splitting out the validation set(s)

+

After we have all the samples we may want to extract some validation sets. There are many ways to do it but here we’ll manually choose some speakers we’ll later skip completely during training.

+

We start by dumping all the sample ids:

+
parallel tar tf {} ::: stoks/*-atoks-3kbps-*.tar.gz | sed -e 's/\.atoks\.npy//' > all-samples-maxvad
+parallel tar tf {} ::: stoks/*-small.en-txt-*.tar.gz | sed -e 's/\.txt//' > all-samples-eqvad
+wc -l all-samples-maxvad
+

Because the sample ids (which are the original file paths) have speaker ids in them we can make a quick histogram:

+
< all-samples-maxvad awk -F_ '{ print $1; }'|sort|uniq -c|sort -n|less
+

From the result we can copy and paste 10 speaker ids of around 50 samples each to get 512 validation samples. We’ll exclude them from the training set because we want to validate on unseen speakers. We have to repeat this process for both splits (maxvad and eqvad) since they’ll have different sample counts and ids:

+
< all-samples-maxvad grep 'train/1579\|train/2033\|train/3182\|train/12981\|train/2284\|train/2297\|train/6348\|train/7200\|train/7679\|train/1989' >
+unseen-speakers-maxvad
+< all-samples-eq grep 'train/1579\|train/2033\|train/3182\|train/12981\|train/2284\|train/2297\|train/6348\|train/7200\|train/7679\|train/1989' > unseen-speakers-eqvad
+

Once we have all the ids we can rescan the whole dataset once and split out the validation samples to separate webdataset shards to make validation fast:

+
python -m whisperspeech.split_out_val_datasets *-atoks-* unseen-speakers-maxvad
+python -m whisperspeech.split_out_val_datasets '*-txt-*' unseen-speakers-eqvad
+cd stoks && python -m whisperspeech.split_out_val_datasets '*-maxvad-stoks-*' ../unseen-speakers-maxvad
+cd stoks && python -m whisperspeech.split_out_val_datasets '*-eqvad-stoks-*' ../unseen-speakers-eqvad
+

We can use wc -l all-samples-maxvad to find out how many samples we have.

+
+
+

Creating the dataset configuration files for training

+

Finally we create the configuration files for the training script:

+
cat > mls-fr-t2s-train.dataset <<EOF
+multilingual-librispeech-webdataset/*-medium-txt-*.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 390203 --txt_kind='medium-txt' --language=fr --exclude_files multilingual-librispeech-webdataset/unseen-speakers-eqvad
+EOF
+cat > mls-fr-s2a-train.dataset <<EOF
+multilingual-librispeech-webdataset/*-atoks-*.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 338362  --language=fr --exclude_files multilingual-librispeech-webdataset/unseen-speakers-maxvad
+EOF
+cat > mls-fr-s2a-val-unseen-speakers.dataset <<EOF
+multilingual-librispeech-webdataset/unseen-speakers-maxvad.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 512 --language fr
+EOF
+cat > mls-fr-t2s-val-unseen-speakers.dataset <<EOF
+multilingual-librispeech-webdataset/unseen-speakers-eqvad.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 512 --txt_kind 'medium-txt' --language fr
+EOF
+
+
+
+

Why WebDataset?

+

All WhisperSpeech training and preproc code got reorganized around webdatasets. Webdatasets are just simple tar files that store all our data samples (files) but they are great for working with very large datasets. Inside these tar files we can store multiple files per sample in any format we want (e.g. the speech mp3/flac/wav files, the text transcripts, tokens in numpy arrays). For example from the data used to train the S2A model we have:

+
$ tar tf whisperspeech-s2a-512c-dim64/librilight-small-000.tar.gz |head -6
+small/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_021.atoks.npy
+small/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_021.stoks.npy
+small/28/amateur_cracksman_librivox_64kb_mp3/amateur_cracksman_04_hornung_64kb_004.atoks.npy
+small/28/amateur_cracksman_librivox_64kb_mp3/amateur_cracksman_04_hornung_64kb_004.stoks.npy
+small/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_052.atoks.npy
+small/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_052.stoks.npy
+

The name of the file is the same as the file name of the original dataset sample and the extensions tell us what kind of value they hold and in which format.

+

Furthermore we can split the whole dataset into fixed-size tar files called shards and load them on demand without unpacking. It turns out that this is exactly what we need for both AI training and data preprocessing:

+
    +
  • for training we start multiple CPU workers in parallel, open different shards in each, stream the data sequentially from disk (fast), decode it independently and then shuffle the samples we receive from each worker to create varied training batches
  • +
  • for preprocessing we independently send each shard to a worker and save all the results in a new webdataset shard
  • +
+

Reading samples sequentially allows us to simply compress the whole file with gzip and offers best performance even on spinning or network disks.

+
+
+
+ +
+
+Note +
+
+
+

For the Juwels cluster there is another crucial benefit. There is a pretty low limit on the total number of files on network disks (inodes to be precise) so there is a strong preference to keep data in a few large files. The network file system performance is also better if we don’t have to open too many files.

+
+
+

Keeping each shard around 5GB seems to work great (the processed shards will likely be a lot smaller but it’s a lot easier to keep a 1-to-1 shard mapping). For the almost 4TB LibriLight dataset this translates to 625 files.

+

We found it quite useful to also keep all the data in some splits. This is data dependent but for LibriLight we followed the original split (small, medium, large) but also extracted speaker 6454 from the large split because it was the largest single-speaker dataset and it allowed us to use it during development without downloading the full 4TB.

+
+
+
+ +
+
+Caution +
+
+
+

The sample file names should not have dots in them, otherwise the WebDataset code gets confused which files go together into one sample. This can be worked around later but it’s easiest if we just do .replace('.', '_') when storing the initial raw dataset.

+
+
+
+
+

Joins on WebDatasets

+

One novel functionality we developed for this project is the capability to join multiple preprocessed webdatasets. This mechanism relies on keeping a constant ordering of samples in a shard and ensuring 1-to-1 correspondence between the input and output shards during preprocessing.

+

Example usage:

+
ds = wds.WebDataset([str(x) for x in Path('librilight/').glob('*.tar')]).compose( # load all audio shards
+    wds.decode(wds.torch_audio), # decode the audio data
+    vq_stoks.merge_in( # merge another WebDataset
+        # for each audio (`raw`) shard, find the path and name of a corresponding `vad` shard
+        vq_stoks.derived_dataset('librilight-processed/', 'vad')
+    ),
+)
+

derived_dataset creates for us a helper function that returns an opened derived dataset given the original shard file name:

+
def derived_dataset(path, kind):
+    def deriver(url):
+        url = str(Path(path)/(Path(url).name.replace("raw", kind) + ".gz"))
+        return wds.WebDataset(wds.SimpleShardList([url])).decode()
+    return deriver
+

This feature is experimental and the API may change as we develop more experience with this merging style.

+
+
+

Examples of preprocessing runs

+

An example of running a preprocessing step locally on a single file:

+
mkdir -p guttenberg-preproc && cd guttenberg-preproc
+python -m whisperspeech.vad ../guttenberg-audiobooks/guttenberg-audiobooks-raw-000010.tar
+

This will generate a file named guttenberg-audiobooks-vad-000000.tar.gz in the guttenberg-preproc directory.

+

On the cluster we can run multiple jobs in parallel (24 in this case), each processing one input shard. Since each job is pretty short (around 30 minutes) it’s easier for the scheduler to squeeze these between longer and higher-priority jobs.

+
mkdir -p whisperspeech-s2a-512c-dim64 && cd whisperspeech-s2a-512c-dim64
+find ../librilight/ -name 'librilight-small-*.tar'| ~/clapa1/run-batch 24 \
+    'python -m whisperspeech.prepare_s2a_dataset $FILE ../librilight-preproc
+            --vq_model ~/clapa1/scratch/vqmodel-512c-dim64-4e-hyptuned-32gpu.model
+            --batch_size 8'
+

The prepare_s2a_dataset script is taking raw audio data from the input file, automatically finding corresponding shards with VAD results in ../librilight-preproc and writing the results to the whisperspeech-s2a-512c-dim64 directory.

+
+
+

Voice activity detection

+

Code: 1B. Voice activity detection

+

Right now we are using the VAD model from WhisperX that is enough to avoid cutting audio in the middle of a word which would hurt automated transcriptions quite a lot. For more fancy datasets with multiple speakers we could use pyannote for its detection of multiple people speaking at once and diarization capability.

+

We later merge the VAD segments into longer chunks for more efficient training (less padding == higher efficiency). The code and histogram plots can be found in 2A. Whisper quantization dataset preparation

+
+
+

Transcription

+

Code: 5A. T2S dataset preparation

+

For training the TTS model (T2S) we run batches of chunked speech segments through FasterWhisper. We use the small.en model since there seems to be little benefit from using the larger models on English speech. For multilingual TTS we would probably want to switch to large-v2.

+
+
+
+ +
+
+Note +
+
+
+

Right now we extract both semantic tokens and transcriptions in one go. Doing the transcriptions is very time-consuming and the result is unlikely to change. OTOH we may want to regenerate the semantic tokens if we train different quantized Whisper models. Because of that we may want to split this into two separate steps and only merge the results just before we generate the training dataset.

+
+
+
+
+

Acoustic token extraction

+

Code: 4A. S2A dataset preparation

+

This is basically the same as T2S above but with Encodec instead of Whisper.

+
+
+

Train/validation split

+

We create validation splits differently for each dataset. For example for LibriLight we use the speaker labels to create a common and unseen speakers splits. Once we have a list of samples we want to use we extract them from the full dataset into a new shard while keeping a list of IDs to skip during training. This way we avoid copying the training samples.

+

This has the downside of delaying all shuffling until training. This is especially problematic for smaller datasets with not enough shards since multiple workers may read the same shard and initially (before the shuffling buffer is filled) deliver the same samples multiple times. This causes overfitting. This is not a problem early in training (the model is too random to overfit) and we make sure we don’t reset the dataloaders between epochs but it is causing issues when resuming training from a checkpoint. The workaround is to preload the shuffling buffer with a lot of samples (.shuffle(initial=20000)). Unfortunately it has the downside of putting a lot of load on the filesystem and adding a significant delay before training can start.

+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 0000000..5194952 --- /dev/null +++ b/index.html @@ -0,0 +1,793 @@ + + + + + + + + + +WhisperSpeech + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +
+ +
+ + +
+ + + +
+ +
+
+

WhisperSpeech

+
+ + + +
+ + + + +
+ + + +
+ + + +

Test it out yourself in Colab
+If you have questions or you want to help you can find us in the #audio-generation channel on the LAION Discord server.

+

An Open Source text-to-speech system built by inverting Whisper. Previously known as spear-tts-pytorch.

+

We want this model to be like Stable Diffusion but for speech – both powerful and easily customizable.

+

We are working only with properly licensed speech recordings and all the code is Open Source so the model will be always safe to use for commercial applications.

+

Currently the models are trained on the English LibriLight dataset. In the next release we want to target multiple languages (Whisper and EnCodec are both multilanguage).

+

Sample of the synthesized voice:

+

https://github.com/collabora/WhisperSpeech/assets/107984/aa5a1e7e-dc94-481f-8863-b022c7fd7434

+
+

Progress update [2024-01-29]

+

We successfully trained a tiny S2A model on an en+pl+fr dataset and it can do voice cloning in French:

+

https://github.com/collabora/WhisperSpeech/assets/107984/267f2602-7eec-4646-a43b-059ff91b574e

+

https://github.com/collabora/WhisperSpeech/assets/107984/fbf08e8e-0f9a-4b0d-ab5e-747ffba2ccb9

+

We were able to do this with frozen semantic tokens that were only trained on English and Polish. This supports the idea that we will be able to train a single semantic token model to support all the languages in the world. Quite likely even ones that are not currently well supported by the Whisper model. Stay tuned for more updates on this front. :)

+
+
+

Progress update [2024-01-18]

+

We spend the last week optimizing inference performance. We integrated torch.compile, added kv-caching and tuned some of the layers – we are now working over 12x faster than real-time on a consumer 4090!

+

We can mix languages in a single sentence (here the highlighted English project names are seamlessly mixed into Polish speech):

+
+

To jest pierwszy test wielojęzycznego Whisper Speech modelu zamieniającego tekst na mowę, który Collabora i Laion nauczyli na superkomputerze Jewels.

+
+

https://github.com/collabora/WhisperSpeech/assets/107984/d7092ef1-9df7-40e3-a07e-fdc7a090ae9e

+

We also added an easy way to test voice-cloning. Here is a sample voice cloned from a famous speech by Winston Churchill (the radio static is a feature, not a bug ;) – it is part of the reference recording):

+

https://github.com/collabora/WhisperSpeech/assets/107984/bd28110b-31fb-4d61-83f6-c997f560bc26

+

You can test all of these on Colab (we optimized the dependencies so now it takes less than 30 seconds to install). A Huggingface Space is coming soon.

+
+
+

Progress update [2024-01-10]

+

We’ve pushed a new SD S2A model that is a lot faster while still generating high-quality speech. We’ve also added an example of voice cloning based on a reference audio file.

+

As always, you can check out our Colab to try it yourself!

+
+
+

Progress update [2023-12-10]

+

Another trio of models, this time they support multiple languages (English and Polish). Here are two new samples for a sneak peek. You can check out our Colab to try it yourself!

+

English speech, female voice (transferred from a Polish language dataset):

+

https://github.com/collabora/WhisperSpeech/assets/107984/aa5a1e7e-dc94-481f-8863-b022c7fd7434

+

A Polish sample, male voice:

+

https://github.com/collabora/WhisperSpeech/assets/107984/4da14b03-33f9-4e2d-be42-f0fcf1d4a6ec

+

Older progress updates are archived here

+
+
+

Downloads

+

We encourage you to start with the Google Colab link above or run the provided notebook locally. If you want to download manually or train the models from scratch then both the WhisperSpeech pre-trained models as well as the converted datasets are available on HuggingFace.

+
+
+

Roadmap

+ +
+
+

Architecture

+

The general architecture is similar to AudioLM, SPEAR TTS from Google and MusicGen from Meta. We avoided the NIH syndrome and built it on top of powerful Open Source models: Whisper from OpenAI to generate semantic tokens and perform transcription, EnCodec from Meta for acoustic modeling and Vocos from Charactr Inc as the high-quality vocoder.

+

We gave two presentations diving deeper into WhisperSpeech. The first one talks about the challenges of large scale training:

+
+
+

Tricks Learned from Scaling WhisperSpeech Models to 80k+ Hours of Speech - video recording by Jakub Cłapa, Collabora

+
Tricks Learned from Scaling WhisperSpeech Models to 80k+ Hours of Speech - video recording by Jakub Cłapa, Collabora
+
+
+

The other one goes a bit more into the architectural choices we made:

+
+
+

Open Source Text-To-Speech Projects: WhisperSpeech - In Depth Discussion

+
Open Source Text-To-Speech Projects: WhisperSpeech - In Depth Discussion
+
+
+
+

Whisper for modeling semantic tokens

+

We utilize the OpenAI Whisper encoder block to generate embeddings which we then quantize to get semantic tokens.

+

If the language is already supported by Whisper then this process requires only audio files (without ground truth transcriptions).

+
+
+

+
Using Whisper for semantic token extraction diagram
+
+
+
+
+
+

EnCodec for modeling acoustic tokens

+

We use EnCodec to model the audio waveform. Out of the box it delivers reasonable quality at 1.5kbps and we can bring this to high-quality by using Vocos – a vocoder pretrained on EnCodec tokens.

+
+
+

+
EnCodec block diagram
+
+
+
+
+

Appreciation

+

Collabora logo      LAION logo

+

This work would not be possible without the generous sponsorships from:

+ +

We gratefully acknowledge the Gauss Centre for Supercomputing e.V. (www.gauss-centre.eu) for funding part of this work by providing computing time through the John von Neumann Institute for Computing (NIC) on the GCS Supercomputer JUWELS Booster at Jülich Supercomputing Centre (JSC), with access to compute provided via LAION cooperation on foundation models research.

+

We’d like to also thank individual contributors for their great help in building this model:

+ +
+
+

Consulting

+

We are available to help you with both Open Source and proprietary AI projects. You can reach us via the Collabora website or on Discord ( and )

+
+
+

Citations

+

We rely on many amazing Open Source projects and research papers:

+
@article{SpearTTS,
+  title = {Speak, Read and Prompt: High-Fidelity Text-to-Speech with Minimal Supervision},
+  url = {https://arxiv.org/abs/2302.03540},
+  author = {Kharitonov, Eugene and Vincent, Damien and Borsos, Zalán and Marinier, Raphaël and Girgin, Sertan and Pietquin, Olivier and Sharifi, Matt and Tagliasacchi, Marco and Zeghidour, Neil},
+  publisher = {arXiv},
+  year = {2023},
+}
+
@article{MusicGen,
+  title={Simple and Controllable Music Generation}, 
+  url = {https://arxiv.org/abs/2306.05284},
+  author={Jade Copet and Felix Kreuk and Itai Gat and Tal Remez and David Kant and Gabriel Synnaeve and Yossi Adi and Alexandre Défossez},
+  publisher={arXiv},
+  year={2023},
+}
+
@article{Whisper,
+  title = {Robust Speech Recognition via Large-Scale Weak Supervision},
+  url = {https://arxiv.org/abs/2212.04356},
+  author = {Radford, Alec and Kim, Jong Wook and Xu, Tao and Brockman, Greg and McLeavey, Christine and Sutskever, Ilya},
+  publisher = {arXiv},
+  year = {2022},
+}
+
@article{EnCodec,
+  title = {High Fidelity Neural Audio Compression},
+  url = {https://arxiv.org/abs/2210.13438},
+  author = {Défossez, Alexandre and Copet, Jade and Synnaeve, Gabriel and Adi, Yossi},
+  publisher = {arXiv},
+  year = {2022},
+}
+
@article{Vocos,
+  title={Vocos: Closing the gap between time-domain and Fourier-based neural vocoders for high-quality audio synthesis}, 
+  url = {https://arxiv.org/abs/2306.00814},
+  author={Hubert Siuzdak},
+  publisher={arXiv},
+  year={2023},
+}
+ + +
+ +
+ +
+ + + + + \ No newline at end of file diff --git a/logo.svg b/logo.svg new file mode 100644 index 0000000..5cc90f6 --- /dev/null +++ b/logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/robots.txt b/robots.txt new file mode 100644 index 0000000..7495975 --- /dev/null +++ b/robots.txt @@ -0,0 +1 @@ +Sitemap: https://collabora.github.io/WhisperSpeech/sitemap.xml diff --git a/search.json b/search.json new file mode 100644 index 0000000..58715b8 --- /dev/null +++ b/search.json @@ -0,0 +1,544 @@ +[ + { + "objectID": "6. Quality-boosting vocoder.html", + "href": "6. Quality-boosting vocoder.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\nVocoder\n\n Vocoder (repo_id='charactr/vocos-encodec-24khz', device=None)\n\nInitialize self. See help(type(self)) for accurate signature." + }, + { + "objectID": "1c. vad merging.html", + "href": "1c. vad merging.html", + "title": "VAD merging", + "section": "", + "text": "source\n\nderived_name\n\n derived_name (input, kind, base='audio')\n\n\nds = wds.WebDataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar']).compose(\n wds.decode(wds.torch_audio),\n utils.merge_in(utils.derived_dataset('vad')),\n utils.find_audio,\n utils.split_to_chunks,\n utils.merge_in(utils.derived_dataset('spk_emb')),\n)\n\n\nimport IPython\nimport time\n\n\nprev = None\nfor s in progress_bar(ds, total=20):\n sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)\n secs = s['tend'] - s['tstart']\n same = sim > 0.6 if secs > 2 else sim > 0.1\n if not same: print(\"new\")\n print(s['__key__'], sim, secs)\n display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))\n if secs > 2:\n prev = s\n time.sleep(.5)\ns\n\n\nds = wds.WebDataset([utils.derived_name('../wolnelektury-wds2/wolnelektury-audio-000000.tar', 'vad')]).compose(\n wds.decode(),\n split,\n utils.merge_in(utils.derived_dataset('spk_emb', base='vad', suffix='')),\n 
merge_by_src_key,\n)\n\n\nfor s in ds: break\ns\n\n\nds = wds.WebDataset([utils.derived_name('../wolnelektury-wds2/wolnelektury-audio-000000.tar', 'vad')]).compose(\n wds.decode(),\n split,\n utils.merge_in(utils.derived_dataset('spk_emb', base='vad', suffix='')),\n merge_by_src_key,\n chunk_merger,\n)\n\n\nfor s in ds: break\ns\n\n\nds = wds.WebDataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar']).compose(\n wds.decode(wds.torch_audio),\n utils.merge_in(utils.derived_dataset('vad')),\n utils.find_audio,\n utils.split_to_chunks,\n utils.merge_in(utils.derived_dataset('spk_emb')),\n merge_by_src_key,\n chunk_merger,\n utils.merge_in(utils.derived_dataset('audio', suffix='', decoders=[wds.torch_audio])),\n utils.find_audio,\n lambda x: utils.split_to_chunks(x, metakeys=['spk_emb.npy']),\n)\n\n\nfor s in ds: break\ns\n\n\nprev = None\nfor s in progress_bar(ds, total=20):\n sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)\n secs = s['tend'] - s['tstart']\n same = sim > 0.6 if secs > 2 else sim > 0.1\n if not same: print(\"new\")\n print(s['__key__'], sim, secs, sum([e-s for s,e in s['orig_s']['subvads.pyd'][s['i']]]))\n display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))\n if secs > 2:\n prev = s\n time.sleep(.5)\n\n\nprepare_mvad('../wolnelektury-wds2/wolnelektury-audio-000000.tar')\n\n\n\n\n\n\n \n \n 100.00% [235/235 00:04<00:00]\n \n \n\n\n\n!tar tf 
../wolnelektury-wds2/wolnelektury-maxvad-000000.tar.gz\n\n./kornhauser-wiatr/kornhauser-wiatr_001.spk_emb.npy\n./kornhauser-wiatr/kornhauser-wiatr_001.subvads.pyd\n./kornhauser-wiatr/kornhauser-wiatr_001.vad.npy\n./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.spk_emb.npy\n./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.subvads.pyd\n./fraszki-ksiegi-pierwsze-epitafium-wysockiemu/jan-kochanowski-fraszki-ksiegi-pierwsze-epitafium-wysockiemu.vad.npy\n./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.spk_emb.npy\n./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.subvads.pyd\n./kucharczyk-jak-modlitwa-ochrania-przed-zlodziejami/jak-modlitwa-ochrania-przed-zlodziejami.vad.npy\n./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.spk_emb.npy\n./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.subvads.pyd\n./nowakowska-niska-rozdzielczosc-proba-wody/proba-wody.vad.npy\n./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.spk_emb.npy\n./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.subvads.pyd\n./slowka-zbior-dziwna-przygoda-rodziny-polanieckich/tadeusz-boy-zelenski-slowka-zbior-dziwna-przygoda-rodziny-polanieckich.vad.npy\n./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.spk_emb.npy\n./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.subvads.pyd\n./piesni-ksiegi-wtore-piesn-xii/jan-kochanowski-piesni-ksiegi-wtore-piesn-xii-nie-masz-i-po-drugi-raz-nie-masz-watp.vad.npy\n./sonety-krymskie-stepy-akermanskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.spk_emb.npy\n./sonety-krymskie-stepy-aker
manskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.subvads.pyd\n./sonety-krymskie-stepy-akermanskie/adam-mickiewicz-sonety-krymskie-stepy-akermanskie.vad.npy\n./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.spk_emb.npy\n./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.subvads.pyd\n./napoj-cienisty-balwan-ze-sniegu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-balwan-ze-sniegu.vad.npy\n./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.spk_emb.npy\n./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.subvads.pyd\n./fraczek-zolw-wiercipieta-prosiaczek/fraczek-zolw-wiercipieta-prosiaczek_001.vad.npy\n./grabinski-nietykalny/grabinski-nietykalny.spk_emb.npy\n./grabinski-nietykalny/grabinski-nietykalny.subvads.pyd\n./grabinski-nietykalny/grabinski-nietykalny.vad.npy\n./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.spk_emb.npy\n./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.subvads.pyd\n./wol-i-mrowki/ignacy-krasicki-bajki-i-przypowiesci-wol-i-mrowki.vad.npy\n./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.spk_emb.npy\n./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.subvads.pyd\n./pszczola-w-bursztynie/jan-andrzej-morsztyn-pszczola-w-bursztynie.vad.npy\n./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.spk_emb.npy\n./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.subvads.pyd\n./jastrzab-i-sokol/ignacy-krasicki-bajki-i-przypowiesci-jastrzab-i-sokol.vad.npy\n./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.spk_emb.npy\n./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.subvads.pyd\n./fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie/jan-kochanowski-fraszki-ksiegi-pierwsze-o-doktorze-hiszpanie.va
d.npy\n./perrault-kopciuszek/perrault-kopciuszek.spk_emb.npy\n./perrault-kopciuszek/perrault-kopciuszek.subvads.pyd\n./perrault-kopciuszek/perrault-kopciuszek.vad.npy\n./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.spk_emb.npy\n./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.subvads.pyd\n./napoj-cienisty-wieczor/boleslaw-lesmian-napoj-cienisty-w-chmur-odbiciu-cykl-wieczor.vad.npy\n./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.spk_emb.npy\n./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.subvads.pyd\n./satyry-czesc-druga-malzenstwo/satyry-czesc-druga-malzenstwo.vad.npy\n./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.spk_emb.npy\n./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.subvads.pyd\n./slowka-zbior-spleen/tadeusz-boy-zelenski-slowka-zbior-spleen.vad.npy\n./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.spk_emb.npy\n./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.subvads.pyd\n./fraczek-zolw-wiercipieta-strus/fraczek-zolw-wiercipieta-strus_001.vad.npy\n./janko-muzykant/janko-muzykant.spk_emb.npy\n./janko-muzykant/janko-muzykant.subvads.pyd\n./janko-muzykant/janko-muzykant.vad.npy\n./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.spk_emb.npy\n./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.subvads.pyd\n./slowka-zbior-piosenki-zb-dobra-mama/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-dobra-mama.vad.npy\n./lange-nowy-tarzan/antoni-lange-nowy-tarzan.spk_emb.npy\n./lange-nowy-tarzan/antoni-lange-nowy-tarzan.subvads.pyd\n./lange-nowy-tarzan/antoni-lange-nowy-tarzan.vad.npy\n./w-pamietniku-zofii-bobrowny/w-pamietniku-zofii-bobrowny.spk_emb.npy\n./w-pamietniku-zofii-bobrowny/w-pamietniku-zofii-bobrowny.subvads.pyd\n./w-pamietniku-zofii-bobrowny/w-pamietniku-z
ofii-bobrowny.vad.npy\n./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.spk_emb.npy\n./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.subvads.pyd\n./fraczek-zolw-wiercipieta-zebra/fraczek-zolw-wiercipieta-zebra_001.vad.npy\n./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.spk_emb.npy\n./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.subvads.pyd\n./but-w-butonierce-milosc-na-aucie/bruno-jasienski-but-w-butonierce-tomik-milosc-na-aucie.vad.npy\n./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.spk_emb.npy\n./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.subvads.pyd\n./sonety-krymskie-grob-potockiej/adam-mickiewicz-sonety-krymskie-grob-potockiej.vad.npy\n./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.spk_emb.npy\n./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.subvads.pyd\n./do-matki/juliusz-slowacki-do-matki-zadrzy-ci-nieraz-serce-mila-matko-moja.vad.npy\n./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.spk_emb.npy\n./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.subvads.pyd\n./slowka-zbior-piosenki-zb-z-niewydanej-szopki-krakowskiej-na-rok-1908/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-z-niewydanej-szopki-krakowskiej-na-rok-1908.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_030_tom-ii-rozdzial-lx.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-
copperfield-t2_006_tom-ii-rozdzial-xxxv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_006_tom-ii-rozdzial-xxxv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_006_tom-ii-rozdzial-xxxv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_017_tom-ii-rozdzial-xlvi.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_022_tom-ii-rozdzial-li.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_023_tom-ii-rozdzial-liii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_012_tom-ii-rozdzial-xli.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_019_tom-ii-rozdzial-xlviii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_010_tom-ii-rozdzial-xxxix.vad.npy\n./dickens-dawid
-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_035_tom-ii-rozdzial-lii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_002_tom-ii-rozdzial-xxxi.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_025_tom-ii-rozdzial-lv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_003_tom-ii-rozdzial-xxxii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_009_tom-ii-rozdzial-xxxviii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_008_tom-ii-rozdzial-xxxvii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-rozdzial-xlv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-rozdzial-xlv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_016_tom-ii-r
ozdzial-xlv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_004_tom-ii-rozdzial-xxxiii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_032_tom-ii-rozdzial-lxii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_021_tom-ii-rozdzial-l.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_018_tom-ii-rozdzial-xlvii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_033_tom-ii-rozdzial-lxiii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_014_tom-ii-rozdzial-xliii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.subvads.pyd\n./dickens-dawid-copperfield-t2/dick
ens-dawid-copperfield-t2_005_tom-ii-rozdzial-xxxiv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_034_tom-ii-rozdzial-lxiv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_001_tom-ii-rozdzial-xxx.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_029_tom-ii-rozdzial-lix.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_007_tom-ii-rozdzial-xxxvi.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_024_tom-ii-rozdzial-liv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_028_tom-ii-rozdzial-lviii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.subvads.pyd\n./dickens
-dawid-copperfield-t2/dickens-dawid-copperfield-t2_026_tom-ii-rozdzial-lvi.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_011_tom-ii-rozdzial-xl.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_031_tom-ii-rozdzial-lxi.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_015_tom-ii-rozdzial-xliv.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_013_tom-ii-rozdzial-xlii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_027_tom-ii-rozdzial-lvii.vad.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.spk_emb.npy\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.subvads.pyd\n./dickens-dawid-copperfield-t2/dickens-dawid-copperfield-t2_020_tom-ii-rozdzial-xlix.vad.npy\n./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemki-tajemnica-dworku-pod-cmentarzem.spk_emb.npy\n./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemk
i-tajemnica-dworku-pod-cmentarzem.subvads.pyd\n./historia-zoltej-cizemki/09-antonina-domanska-historia-zoltej-cizemki-tajemnica-dworku-pod-cmentarzem.vad.npy\n./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.spk_emb.npy\n./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.subvads.pyd\n./historia-zoltej-cizemki/11-antonina-domanska-historia-zoltej-cizemki-zakonczenie.vad.npy\n./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.spk_emb.npy\n./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.subvads.pyd\n./historia-zoltej-cizemki/08-antonina-domanska-historia-zoltej-cizemki-swiety-kazimierz.vad.npy\n./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.spk_emb.npy\n./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.subvads.pyd\n./historia-zoltej-cizemki/07-antonina-domanska-historia-zoltej-cizemki-jasiek.vad.npy\n./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.spk_emb.npy\n./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.subvads.pyd\n./historia-zoltej-cizemki/05-antonina-domanska-historia-zoltej-cizemki-u-jana-dlugosza.vad.npy\n./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.spk_emb.npy\n./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.subvads.pyd\n./historia-zoltej-cizemki/03-antonina-domanska-historia-zoltej-cizemki-u-wilow.vad.npy\n./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.spk_emb.npy\n./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.subvads.pyd\n./historia-zoltej-cizemki/10-antonina-domanska-historia-zoltej-cizemki-poreba.vad.npy\n./historia-zoltej-cizemki/04-antonina-domanska-historia-zoltej-cizemki-konik-zwierzyniecki.spk_emb.npy\n./historia-zoltej-cizemki/04-antonina-domanska-h
istoria-zoltej-cizemki-konik-zwierzyniecki.subvads.pyd\n./historia-zoltej-cizemki/04-antonina-domanska-historia-zoltej-cizemki-konik-zwierzyniecki.vad.npy\n./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.spk_emb.npy\n./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.subvads.pyd\n./historia-zoltej-cizemki/02-antonina-domanska-historia-zoltej-cizemki-dziwny-pielgrzym.vad.npy\n./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.spk_emb.npy\n./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.subvads.pyd\n./historia-zoltej-cizemki/01-antonina-domanska-historia-zoltej-cizemki-w-domu-i-w-puszczy.vad.npy\n./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.spk_emb.npy\n./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.subvads.pyd\n./historia-zoltej-cizemki/06-antonina-domanska-historia-zoltej-cizemki-uczen-mistrza-wita.vad.npy\n./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.spk_emb.npy\n./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.subvads.pyd\n./slowka-zbior-piosenki-zb-piosenka-wzruszajaca/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-wzruszajaca.vad.npy\n./do-justyny/franciszek-karpinski-do-justyny.spk_emb.npy\n./do-justyny/franciszek-karpinski-do-justyny.subvads.pyd\n./do-justyny/franciszek-karpinski-do-justyny.vad.npy\n./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.spk_emb.npy\n./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.subvads.pyd\n./pan-i-pies/ignacy-krasicki-bajki-i-przypowiesci-pan-i-pies.vad.npy\n./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-nowe.spk_emb.npy\n./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-now
e.subvads.pyd\n./pasterz-i-owce-bajki-nowe/pasterz-i-owce-bajki-nowe.vad.npy\n./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.spk_emb.npy\n./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.subvads.pyd\n./prawdziwy-opis-wypadku-z-p-waldemarem/prawdziwy-opis-wypadku-z-p-waldemarem.vad.npy\n./grabinski-przypadek/grabinski-przypadek.spk_emb.npy\n./grabinski-przypadek/grabinski-przypadek.subvads.pyd\n./grabinski-przypadek/grabinski-przypadek.vad.npy\n./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.spk_emb.npy\n./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.subvads.pyd\n./rozmowa-mistrza-polikarpa-ze-smiercia/rozmowa-mistrza-polikarpa-ze-smiercia.vad.npy\n./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.spk_emb.npy\n./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.subvads.pyd\n./hej-w-dzien-narodzenia/autor-nieznany-hej-w-dzien-narodzenia.vad.npy\n./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.spk_emb.npy\n./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.subvads.pyd\n./napoj-cienisty-dziewczyna/boleslaw-lesmian-napoj-cienisty-powiesc-o-rozumnej-dziewczynie-cykl-dziewczyna.vad.npy\n./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.spk_emb.npy\n./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.subvads.pyd\n./grabinski-niesamowita-opowiesc-na-tropie/grabinski-niesamowita-opowiesc-na-tropie.vad.npy\n./sroczynska-lasowiackie-serce/lasowiackie-serce.spk_emb.npy\n./sroczynska-lasowiackie-serce/lasowiackie-serce.subvads.pyd\n./sroczynska-lasowiackie-serce/lasowiackie-serce.vad.npy\n./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-baloni
ka.spk_emb.npy\n./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-balonika.subvads.pyd\n./slowka-zbior-piosenki-zb-wiersz-inauguracyjny/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-wiersz-inauguracyjny-na-otwarcie-piatego-sezonu-zielonego-balonika.vad.npy\n./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.spk_emb.npy\n./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.subvads.pyd\n./nasza-czarna-jaskoleczka/nasza-czarna-jaskoleczka.vad.npy\n./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.spk_emb.npy\n./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.subvads.pyd\n./puszkin-bajka-o-rybaku-i-rybce/puszkin-bajka-o-rybaku-i-rybce.vad.npy\n./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.spk_emb.npy\n./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.subvads.pyd\n./nowakowska-niska-rozdzielczosc-daj-mi/daj-mi.vad.npy\n./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.spk_emb.npy\n./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.subvads.pyd\n./slowka-zbior-replika-kobiety-polskiej/tadeusz-boy-zelenski-slowka-zbior-replika-kobiety-polskiej.vad.npy\n./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.spk_emb.npy\n./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.subvads.pyd\n./but-w-butonierce-ipecacuana/bruno-jasienski-but-w-butonierce-tomik-ipecacuana.vad.npy\n./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.spk_emb.npy\n./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.subvads.pyd\n./madry-i-glupi/ignacy-krasicki-bajki-i-przypowiesci-madry-i-glupi.vad.npy\n./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-bakczysaraj-w-nocy.spk_emb.npy\n./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-b
akczysaraj-w-nocy.subvads.pyd\n./sonety-krymskie-bakczysaraj-w-nocy/adam-mickiewicz-sonety-krymskie-bakczysaraj-w-nocy.vad.npy\n./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.spk_emb.npy\n./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.subvads.pyd\n./sklepy-cynamonowe-traktat-o-manekinach-dokonczenie/sklepy-cynamonowe-traktat-o-manekinach-dokonczenie.vad.npy\n./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.spk_emb.npy\n./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.subvads.pyd\n./schulz-sanatorium-pod-klepsydra-druga-jesien/schulz-sanatorium-pod-klepsydra-druga-jesien_001_druga-jesien.vad.npy\n./medrcy-swiata/autor-nieznany-medrcy-swiata.spk_emb.npy\n./medrcy-swiata/autor-nieznany-medrcy-swiata.subvads.pyd\n./medrcy-swiata/autor-nieznany-medrcy-swiata.vad.npy\n./aniol/aniol.spk_emb.npy\n./aniol/aniol.subvads.pyd\n./aniol/aniol.vad.npy\n./do-motyla/jan-andrzej-morsztyn-do-motyla.spk_emb.npy\n./do-motyla/jan-andrzej-morsztyn-do-motyla.subvads.pyd\n./do-motyla/jan-andrzej-morsztyn-do-motyla.vad.npy\n./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.spk_emb.npy\n./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.subvads.pyd\n./napoj-cienisty-dokola-klombu/boleslaw-lesmian-napoj-cienisty-postacie-cykl-dokola-klombu.vad.npy\n./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.spk_emb.npy\n./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.subvads.pyd\n./dabrowska-boze-narodzenie/maria-dabrowska-boze-narodzenie.vad.npy\n./spiewak-spod-strzechy/spiewak-spod-strzechy.spk_emb.npy\n./spiewak-spod-strzechy/spiewak-spod-strzechy.subvads.pyd\n./spiewak-spod-strzechy/spiewak-spod-strzechy.vad.npy\n./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.sp
k_emb.npy\n./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.subvads.pyd\n./ziemia-i-potok-bajki-nowe/ziemia-i-potok-bajki-nowe.vad.npy\n./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.spk_emb.npy\n./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.subvads.pyd\n./sonety-krymskie-aluszta-w-dzien/adam-mickiewicz-sonety-krymskie-aluszta-w-dzien.vad.npy\n./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.spk_emb.npy\n./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.subvads.pyd\n./napoj-cienisty-wiosna/boleslaw-lesmian-napoj-cienisty-postacie-cykl-wiosna.vad.npy\n./berenice/edgar-allan-poe-berenice.spk_emb.npy\n./berenice/edgar-allan-poe-berenice.subvads.pyd\n./berenice/edgar-allan-poe-berenice.vad.npy\n./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.spk_emb.npy\n./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.subvads.pyd\n./chlop-i-jowisz-bajki-nowe/chlop-i-jowisz-bajki-nowe.vad.npy\n./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.spk_emb.npy\n./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.subvads.pyd\n./podrozny-i-kaleka/ignacy-krasicki-bajki-i-przypowiesci-podrozny-i-kaleka.vad.npy\n./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.spk_emb.npy\n./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.subvads.pyd\n./grabinski-ksiega-ognia-czerwona-magda/grabinski-ksiega-ognia-czerwona-magda.vad.npy\n./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.spk_emb.npy\n./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.subvads.pyd\n./wino-i-woda/ignacy-krasicki-bajki-i-przypowiesci-wino-i-woda.vad.npy\n./grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.spk_emb.npy\n./grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.subvads.pyd\n./gra
binski-z-wyjatkow-w-pomrokach-wiary-klatwa/grabinski-z-wyjatkow-w-pomrokach-wiary-klatwa.vad.npy\n./blumengraber-do-profesorow/blumengraber-do-profesorow.spk_emb.npy\n./blumengraber-do-profesorow/blumengraber-do-profesorow.subvads.pyd\n./blumengraber-do-profesorow/blumengraber-do-profesorow.vad.npy\n./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.spk_emb.npy\n./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.subvads.pyd\n./wyzel-i-brytan-ii-bajki-nowe/wyzel-i-brytan-ii-bajki-nowe.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_017_17-oto-jest-to-dziecko.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_006_6-kopalnia-diamentow.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_011_11-ram-dass.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_012_12-po-drugiej-stronie-sciany.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_004_4-lottie.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_008_8-na-poddaszu.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_015_15-czarnoksieznik.vad.npy\n./mala-ksiezniczka/ma
la-ksiezniczka_010_10-przybysz-z-indii.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_010_10-przybysz-z-indii.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_010_10-przybysz-z-indii.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_018_18-staralam-sie-byc-nia-zawsze.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_019_19-anna.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_003_3-ermenegarda.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_013_13-jedna-z-szarego-tlumu.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_014_14-co-slyszal-i-widzial-melchizedech.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_005_5-becky.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_009_9-melchizedech.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_016_16-odwiedziny.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.sub
vads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_001_1-sara.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_002_2-lekcja-francuskiego.vad.npy\n./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.spk_emb.npy\n./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.subvads.pyd\n./mala-ksiezniczka/mala-ksiezniczka_007_7-jeszcze-o-kopalniach-diamentow.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_014_czesc-14.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_006_czesc-6.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_002_czesc-2.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_013_czesc-13.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.subvads.pyd\n./balzac-komedia-ludzka-e
ugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_011_czesc-11.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_001_czesc-1.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_005_czesc-5.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_003_czesc-3.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_009_czesc-9.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_012_czesc-12.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_016_czesc-16.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8
.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_008_czesc-8.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_015_czesc-15.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_010_czesc-10.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_007_czesc-7.vad.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.spk_emb.npy\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.subvads.pyd\n./balzac-komedia-ludzka-eugenia-grandet/balzac-komedia-ludzka-eugenia-grandet_004_czesc-4.vad.npy\n./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.spk_emb.npy\n./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.subvads.pyd\n./kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych/kornhauser-tyle-rzeczy-niezwyklych-tyle-rzeczy-niezwyklych_001.vad.npy\n./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.spk_emb.npy\n./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.subv
ads.pyd\n./slowka-zbior-dziadzio/tadeusz-boy-zelenski-slowka-zbior-dziadzio.vad.npy\n./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.spk_emb.npy\n./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.subvads.pyd\n./osiel-i-wol/ignacy-krasicki-bajki-i-przypowiesci-osiel-i-wol.vad.npy\n./hop-frog/edgar-allan-poe-hop-frog.spk_emb.npy\n./hop-frog/edgar-allan-poe-hop-frog.subvads.pyd\n./hop-frog/edgar-allan-poe-hop-frog.vad.npy\n./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.spk_emb.npy\n./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.subvads.pyd\n./slowka-zbior-ernestynka/tadeusz-boy-zelenski-slowka-zbior-ernestynka.vad.npy\n./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.spk_emb.npy\n./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.subvads.pyd\n./maska-smierci-szkarlatnej/edgar-allan-poe-maska-smierci-szkarlatnej.vad.npy\n./tulli-sny-i-kamienie/sny-i-kamienie.spk_emb.npy\n./tulli-sny-i-kamienie/sny-i-kamienie.subvads.pyd\n./tulli-sny-i-kamienie/sny-i-kamienie.vad.npy\n./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.spk_emb.npy\n./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.subvads.pyd\n./beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly/beresewicz-czy-pisarzom-czy-lubil-pan-chodzic-do-szkoly_001.vad.npy\n./skrucha-jozi/skrucha-jozi.spk_emb.npy\n./skrucha-jozi/skrucha-jozi.subvads.pyd\n./skrucha-jozi/skrucha-jozi.vad.npy\n./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.spk_emb.npy\n./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.subvads.pyd\n./piesni-ksiegi-pierwsze-piesn-xx/jan-kochanowski-piesni-ksiegi-pierwsze-piesn-xx-milo-szalec-kiedy-czas-po-temu.vad.npy\n./proby-ksiega-trzecia/p
roby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_005_rozdzial-v-o-wierszach-wergilego.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_012_rozdzial-xii-o-fizjonomii.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_007_rozdzial-vii-o-ciezarach-wielkosci.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_001_rozdzial-i-o-pozytecznym-i-poczciwym.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_013_rozdzial-xiii-o-doswiadczeniu.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_006_rozdzial-vi-o-pojazdach.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_010_rozdzial-x-o-oszczedzaniu-woli.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.spk_emb.npy\n./proby-ksiega-trzec
ia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_003_rozdzial-iii-o-trzech-rodzajach-obcowania.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_004_rozdzial-iv-o-dywersji.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_011_rozdzial-xi-o-kulawych.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_008_rozdzial-viii-o-sztuce-rozmawiania.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_009_rozdzial-ix-o-proznosci.vad.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.spk_emb.npy\n./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.subvads.pyd\n./proby-ksiega-trzecia/proby-ksiega-trzecia_002_rozdzial-ii-o-zalu.vad.npy\n./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.spk_emb.npy\n./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.subvads.pyd\n./slonce-i-zaby-bajki-nowe/slonce-i-zaby-bajki-nowe.vad.npy\n./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.spk_emb.npy\n./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.subvads.pyd\n./slowka-zbior-nowa-wiara/tadeusz-boy-zelenski-slowka-zbior-nowa-wiara.vad.npy\n./sonety-krymskie-bakczysaraj/adam-mickiewicz-sonety-krymskie-bakczysaraj.spk_emb.npy\n./sonety-krymskie-bakczys
araj/adam-mickiewicz-sonety-krymskie-bakczysaraj.subvads.pyd\n./sonety-krymskie-bakczysaraj/adam-mickiewicz-sonety-krymskie-bakczysaraj.vad.npy\n./wolny-hamkalo-nikt-nic/nikt-nic.spk_emb.npy\n./wolny-hamkalo-nikt-nic/nikt-nic.subvads.pyd\n./wolny-hamkalo-nikt-nic/nikt-nic.vad.npy\n./wabik-bajki-nowe/wabik-bajki-nowe.spk_emb.npy\n./wabik-bajki-nowe/wabik-bajki-nowe.subvads.pyd\n./wabik-bajki-nowe/wabik-bajki-nowe.vad.npy\n./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.spk_emb.npy\n./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.subvads.pyd\n./wilk-i-baran-bajki-nowe/wilk-i-baran-bajki-nowe.vad.npy\n./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.spk_emb.npy\n./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.subvads.pyd\n./grabinski-ksiega-ognia-bialy-wyrak/grabinski-ksiega-ognia-bialy-wyrak.vad.npy\n./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.spk_emb.npy\n./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.subvads.pyd\n./konopnicka-w-polu/w-polu-pojdziemy-w-pole-w-ranny-czas.vad.npy\n./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.spk_emb.npy\n./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.subvads.pyd\n./lis-i-osiel/ignacy-krasicki-bajki-i-przypowiesci-lis-i-osiel.vad.npy\n./do-delljusa/do-delljusa.spk_emb.npy\n./do-delljusa/do-delljusa.subvads.pyd\n./do-delljusa/do-delljusa.vad.npy\n./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.spk_emb.npy\n./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.subvads.pyd\n./satyry-czesc-pierwsza-zona-modna/satyry-czesc-pierwsza-zona-modna.vad.npy\n./janicki-i-nas-wybawi/i-nas-wybawi.spk_emb.npy\n./janicki-i-nas-wybawi/i-nas-wybawi.subvads.pyd\n./janicki-i-nas-wybawi/i-nas-wybawi.vad.npy\n./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.spk_emb.npy\n./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.subva
ds.pyd\n./napoj-cienisty-cmentarz/boleslaw-lesmian-napoj-cienisty-w-nicosc-sniaca-sie-droga-cykl-cmentarz.vad.npy\n./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.spk_emb.npy\n./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.subvads.pyd\n./przyjaciel/ignacy-krasicki-bajki-i-przypowiesci-przyjaciel.vad.npy\n./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.spk_emb.npy\n./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.subvads.pyd\n./but-w-butonierce-jak-introdukcja/bruno-jasienski-but-w-butonierce-tomik-jak-introdukcja.vad.npy\n./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.spk_emb.npy\n./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.subvads.pyd\n./czlowiek-i-zwierciadla/ignacy-krasicki-bajki-i-przypowiesci-czlowiek-i-zwierciadla.vad.npy\n./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.spk_emb.npy\n./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.subvads.pyd\n./dobroczynnosc/ignacy-krasicki-bajki-i-przypowiesci-dobroczynnosc.vad.npy\n./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.spk_emb.npy\n./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.subvads.pyd\n./krol-i-pisarze/ignacy-krasicki-bajki-i-przypowiesci-krol-i-pisarze.vad.npy\n./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.spk_emb.npy\n./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.subvads.pyd\n./grzegorzewska-wszystkie-wieczory-swiata/wszystkie-wieczory-swiata.vad.npy\n./do-leukonoe/do-leukonoe.spk_emb.npy\n./do-leukonoe/do-leukonoe.subvads.pyd\n./do-leukonoe/do-leukonoe.vad.npy\n./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.spk_emb.npy\n./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.subvads.pyd\n./slonecznik-i-fialek-bajki-nowe/slonecznik-i-fialek-bajki-nowe.vad.npy\n./slowka-zbior-
odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.spk_emb.npy\n./slowka-zbior-odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.subvads.pyd\n./slowka-zbior-odsiecz-wiednia/tadeusz-boy-zelenski-slowka-zbior-odsiecz-wiednia.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_022_rozdzial-22.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_010_rozdzial-10.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_027_rozdzial-27.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_001_rozdzial-1.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_025_rozdzial-25.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_020_rozdzial-20.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_003_rozdzial-3.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_011_rozdzial-11.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.spk_emb.npy\n.
/arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_007_rozdzial-7.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_019_rozdzial-19.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_018_rozdzial-18.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_013_rozdzial-13.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_016_rozdzial-16.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_008_rozdzial-8.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_023_rozdzial-23.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_021_rozdzial-21.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_002_rozdzial-2.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_014_rozdzial-14
.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_024_rozdzial-24.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_009_rozdzial-9.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_017_rozdzial-17.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_005_rozdzial-5.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_012_rozdzial-12.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_028_rozdzial-28.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_006_rozdzial-6.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_026_rozdzial-26.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_015_rozdzial-15.vad.npy\n./arystoteles-poetyka/arystoteles-poetyka_004_rozdzial-4.spk_emb.npy\n./arystoteles-poetyka/arystoteles-poetyka_004_ro
zdzial-4.subvads.pyd\n./arystoteles-poetyka/arystoteles-poetyka_004_rozdzial-4.vad.npy\n./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.spk_emb.npy\n./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.subvads.pyd\n./slowka-zbior-piosenki-zb-piosenka-w-stylu-klasycznym/tadeusz-boy-zelenski-slowka-zbior-piosenki-zielonego-balonika-piosenka-w-stylu-klasycznym.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_010_w-noska.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_007_bijatyka.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_004_rozstanie.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_009_list.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopni
cka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_011_pimpus-smialek.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_012_postanowienie.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_005_marsz-z-kuchni.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_003_lekcja-tanca.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_001_szkola.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.subv
ads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_008_pimpus-buty-czysci.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_002_szczescie-rodzinne.vad.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.spk_emb.npy\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.subvads.pyd\n./konopnicka-szkolne-przygody-pimpusia-sadelko/konopnicka-szkolne-przygody-pimpusia-sadelko_006_katastrofa.vad.npy\n./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.spk_emb.npy\n./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.subvads.pyd\n./but-w-butonierce-przejechali/bruno-jasienski-but-w-butonierce-tomik-przejechali.vad.npy\n./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.spk_emb.npy\n./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.subvads.pyd\n./fiedorczuk-kazdy-snil-swoj-sen/fiedorczuk-kazdy-snil-swoj-sen.vad.npy\n\n\n\nds = chunked_audio_dataset(['../wolnelektury-wds2/wolnelektury-audio-000000.tar'])\nprev = None\nfor s in progress_bar(ds, total=6):\n sim = F.cosine_similarity(torch.tensor(s['spk_emb.npy']), torch.tensor((prev if prev is not None else s)['spk_emb.npy']), dim=0)\n if sim < 0.5: print(\"new\")\n print(s['__key__'], sim, s['tend'] - s['tstart'], sum([e-s for s,e in s['orig_s']['subvads.pyd'][s['i']]]))\n display(IPython.display.Audio(s['samples'], rate=s['sample_rate']))\n time.sleep(.5)\n prev = s" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html", + "href": "2A. 
Whisper quantization dataset preparation.html", + "title": "WhisperSpeech", + "section": "", + "text": "Doing transcription means sampling from the Whisper auto-regresive decoder. This is too slow to do for each training batch. Fortunately the trainscriptions are small text snippets so we can precompute them once for the whole dataset.\nWe use segments from Voice Activity Detection to reduce any boundary issues, the we use webdataset to yields multiple chunks from a FLAC file we only load once. The VAD segments are merged into longer chunks to make Whisper processing more efficent (it always processes 30s at a time)\nUsage:\npython -m whisperspeech.wh_transcribe librilight-large-wo6454-flac-000002.tar\nYou can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt.\n\n\n\nThe autoreload extension is already loaded. To reload it, use:\n %reload_ext autoreload\n\n\n\nimport pylab as plt\nimport IPython\n\n\nflac_url = 'https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-small-flac-000000.tar'\n\n\nflac_url = './librilight-small-flac-000000.tar'" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#precompute-whisper-transcriptions-for-vq-bottleneck-distilation", + "href": "2A. Whisper quantization dataset preparation.html#precompute-whisper-transcriptions-for-vq-bottleneck-distilation", + "title": "WhisperSpeech", + "section": "", + "text": "Doing transcription means sampling from the Whisper auto-regresive decoder. This is too slow to do for each training batch. Fortunately the trainscriptions are small text snippets so we can precompute them once for the whole dataset.\nWe use segments from Voice Activity Detection to reduce any boundary issues, the we use webdataset to yields multiple chunks from a FLAC file we only load once. 
The VAD segments are merged into longer chunks to make Whisper processing more efficent (it always processes 30s at a time)\nUsage:\npython -m whisperspeech.wh_transcribe librilight-large-wo6454-flac-000002.tar\nYou can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt.\n\n\n\nThe autoreload extension is already loaded. To reload it, use:\n %reload_ext autoreload\n\n\n\nimport pylab as plt\nimport IPython\n\n\nflac_url = 'https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-small-flac-000000.tar'\n\n\nflac_url = './librilight-small-flac-000000.tar'" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#merge-vad-segments-into-longer-chunks", + "href": "2A. Whisper quantization dataset preparation.html#merge-vad-segments-into-longer-chunks", + "title": "WhisperSpeech", + "section": "Merge VAD segments into longer chunks", + "text": "Merge VAD segments into longer chunks\n\n# load some VAD ouputs\nds = wds.WebDataset(\n vad.flac_to_vad_name(flac_url)\n).decode().to_tuple('vad.npy')\nchunks = [x[0] for x in progress_bar(ds, total='noinfer')]\n\n\n\n\n\n\n \n \n 100.00% [335/335 00:00<00:00]\n \n \n\n\n\n# quick test\nlen(chunks[0]), len(chunk_merger(chunks[0]))\n\n(46, 28)\n\n\n\nplt.hist([te-ts for x in chunks for ts,te in x])\nplt.title('Segment length distribution straight out of the VAD algorithm');\n\n\n\n\n\n\n\n\n\nplt.hist([te-ts for x in chunks for ts,te in chunk_merger(x)]);\nplt.title('Chunk length distribution after greedy merging');\n\n\n\n\n\n\n\n\n\n(np.array([te-ts for x in chunks for ts,te in chunk_merger(x)]) < 10).mean()\n\n0.03671825647504738\n\n\nIn the above distribution only 3,7% of the samples have < 10 seconds. 
We noticed that this limits the ability of the T2S model to generate short sequences reliably.\nIt does not seem to matter for quantizing Whisper so we can keep this distribution (it uses less compute for training).\nFor T2S we can add some more shorter chunks at random:\n\nplt.hist([te-ts for x in chunks for ts,te in chunk_merger(x, random_cutter)])\nplt.title('Chunk length distribution after randomized merging');" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#merge-the-flac-and-vad-datasets", + "href": "2A. Whisper quantization dataset preparation.html#merge-the-flac-and-vad-datasets", + "title": "WhisperSpeech", + "section": "Merge the FLAC and VAD datasets", + "text": "Merge the FLAC and VAD datasets\nFirst we want to merge the VAD dataset with the FLAC audio data.\n\nds = wds_compose(vad.load_dataset(flac_url),\n merge_in(wds.WebDataset(vad.flac_to_vad_name(flac_url)).decode())\n)\n\n\nfor s in ds: break\ns # notice the 'vad.npy' values that was missing from the FLAC dataset\n\n{'__key__': 'small/100/sea_fairies_0812_librivox_64kb_mp3/01_baum_sea_fairies_64kb',\n '__url__': 'librilight-small-vad-000000.tar.gz',\n 'flac': (tensor([[0., 0., 0., ..., 0., 0., 0.]]), 16000),\n 'json': {'speaker': '100',\n 'book_meta': {'id': '2315',\n 'title': 'Sea Fairies',\n 'description': \"<p>In 1910, Baum hoped to end the Oz series and follow with a new series about a little girl named Trot and her sailor companion, Cap'n Bill. The Sea Fairies (1911) was the first book in the projected series and took Trot and Cap'n Bill under the sea where they had adventures with mermaids and other fantastic creatures. It was followed by Sky Island (1912) and then Baum returned to the Oz titles. He brought Trot and Cap'n Bill to Oz in the Scarecrow of Oz (1915). 
(Summary by Judy Bieber)</p>\",\n 'url_text_source': 'http://www.gutenberg.org/etext/4358',\n 'language': 'English',\n 'copyright_year': '1911',\n 'num_sections': '22',\n 'url_rss': 'https://librivox.org/rss/2315',\n 'url_zip_file': 'http://www.archive.org/download/sea_fairies_0812_librivox/sea_fairies_0812_librivox_64kb_mp3.zip',\n 'url_project': 'http://en.wikipedia.org/wiki/The_Sea_Fairies',\n 'url_librivox': 'https://librivox.org/the-sea-fairies-by-l-frank-baum/',\n 'url_other': None,\n 'totaltimesecs': 15311,\n 'authors': [{'id': '406',\n 'first_name': 'L. Frank',\n 'last_name': 'Baum',\n 'dob': '1856',\n 'dod': '1919'}],\n 'genre': ['Action & Adventure'],\n 'Dramatic Readings': False,\n 'meta_genre': 'Literature'},\n 'snr': 11.4471,\n 'voice_activity': [[1.52, 11.2],\n [11.84, 14.08],\n [15.12, 35.76],\n [36.32, 55.6],\n [56.24, 70.48],\n [71.28, 79.52],\n [80.08, 89.76],\n [90.24, 97.52],\n [98.0, 101.28],\n [102.8, 124.88],\n [125.36, 133.12],\n [133.68, 154.16],\n [154.64, 177.2],\n [178.0, 196.96],\n [197.68, 211.44],\n [212.32, 216.32],\n [216.96, 243.52],\n [244.0, 250.72],\n [251.52, 268.32],\n [268.96, 308.56],\n [309.04, 315.28],\n [316.0, 317.36],\n [317.92, 325.44],\n [326.24, 343.6],\n [344.08, 350.32],\n [350.88, 356.64],\n [357.2, 363.2],\n [363.76, 365.2],\n [365.2, 373.2],\n [373.84, 392.0],\n [392.56, 401.04],\n [401.6, 456.96],\n [457.68, 501.92],\n [502.4, 531.04],\n [531.6, 554.48],\n [554.96, 568.32],\n [568.96, 585.84],\n [587.04, 588.48],\n [597.12, 597.92]]},\n 'vad.npy': array([[ 1.764, 6.49 ],\n [ 6.773, 11.18 ],\n [ 11.98 , 14.03 ],\n [ 15.31 , 36.3 ],\n [ 36.3 , 56.06 ],\n [ 56.4 , 70.6 ],\n [ 71.4 , 101.2 ],\n [102.75 , 103.56 ],\n [103.7 , 121.75 ],\n [122.06 , 125. 
],\n [125.44 , 133.4 ],\n [133.8 , 154.6 ],\n [154.6 , 177.6 ],\n [178.1 , 197.2 ],\n [197.9 , 212.1 ],\n [212.5 , 222.5 ],\n [222.8 , 243.6 ],\n [244.2 , 246.5 ],\n [246.8 , 251.1 ],\n [251.5 , 256.2 ],\n [256.5 , 257.8 ],\n [258.2 , 259.8 ],\n [259.8 , 268.5 ],\n [269.2 , 289.8 ],\n [289.8 , 315.8 ],\n [316. , 317.2 ],\n [318. , 319. ],\n [319.8 , 344. ],\n [344.2 , 350.2 ],\n [351. , 352.5 ],\n [353. , 356.8 ],\n [357.5 , 373.5 ],\n [374. , 388. ],\n [388.2 , 397.2 ],\n [397.5 , 401.5 ],\n [401.8 , 423.5 ],\n [423.5 , 448. ],\n [448. , 457.2 ],\n [457.8 , 460.8 ],\n [461. , 477.8 ],\n [478.5 , 502.2 ],\n [502.2 , 527.5 ],\n [527.5 , 550.5 ],\n [550.5 , 576.5 ],\n [577. , 586. ],\n [587.5 , 588.5 ]], dtype=float16)}" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#split-the-audio-into-chunks", + "href": "2A. Whisper quantization dataset preparation.html#split-the-audio-into-chunks", + "title": "WhisperSpeech", + "section": "Split the audio into chunks", + "text": "Split the audio into chunks\nAfter we merge the datasets and chunk the segments we can split each audio file into individual samples and pad them to 30s.\n\nsplit_ds = wds_compose(ds,\n wds.map_dict(**{\"vad.npy\":chunk_merger}),\n split_to_chunks,\n utils.resampler(16000, 'samples_16k')\n)\n\n\nfor s in split_ds: break\ns\n\n{'__key__': './dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000',\n '__url__': '../wolnelektury-preproc-wds/wolnelektury-vad-000001.tar.gz',\n 'i': 0,\n 'imax': 115,\n 'tstart': 0.00844,\n 'tend': 10.06,\n 'total_seconds': 2776.057029478458,\n 'lpad': 0,\n 'rpad': 879616,\n 'lpad_s': 0.0,\n 'rpad_s': 19.9459410430839,\n 'samples': tensor([ 1.8147e-05, -4.9754e-06, -1.3190e-05, ..., 0.0000e+00,\n 0.0000e+00, 0.0000e+00]),\n 'sample_rate': 44100,\n 'samples_16k': tensor([ 4.3992e-06, 9.4182e-07, -1.3307e-06, ..., 0.0000e+00,\n 0.0000e+00, 0.0000e+00])}\n\n\n\nIPython.display.display(IPython.display.Audio(s['samples_16k'], 
rate=16000))\n\n\n \n \n Your browser does not support the audio element." + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#transcribe", + "href": "2A. Whisper quantization dataset preparation.html#transcribe", + "title": "WhisperSpeech", + "section": "Transcribe", + "text": "Transcribe\n\nwhmodel = whisper.load_model('base.en')\ndecoding_options = whisper.DecodingOptions(language='en')\n\n\noutput = flac_url.rsplit(\"/\", 1)[1].replace('flac', 'txt') + \".gz\"\nwith wds.TarWriter(output) as sink:\n for s in progress_bar(split_ds, total=256):\n mel = whisper.log_mel_spectrogram(s['samples'].unsqueeze(0).cuda())\n embs = whmodel.encoder(mel)\n decs = whmodel.decode(embs, decoding_options)\n\n sink.write({\n \"__key__\": s['__key__'],\n \"txt\": decs[0].text,\n })\n\n\n\n\n\n\n \n \n 100.00% [256/256 00:59<00:00]" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#transcribe-in-batches", + "href": "2A. Whisper quantization dataset preparation.html#transcribe-in-batches", + "title": "WhisperSpeech", + "section": "Transcribe in batches", + "text": "Transcribe in batches\nWe have one more thing to add – batch processing makes the transcription quite a bit faster (bs=16 brings a 4.5x speedup).\n\nbatched_ds = wds_compose(split_ds,\n wds.to_tuple('__key__', 'samples'),\n wds.batched(16),\n)" + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#verify-the-transcripts-and-the-chunks-work-together", + "href": "2A. 
Whisper quantization dataset preparation.html#verify-the-transcripts-and-the-chunks-work-together", + "title": "WhisperSpeech", + "section": "Verify the transcripts and the chunks work together", + "text": "Verify the transcripts and the chunks work together\n\ntxt_ds = wds_compose(split_ds,\n merge_in(wds.WebDataset('../wolnelektury-preproc-wds/'+flac_url.rsplit(\"/\", 1)[1].replace('flac', 'txt') + \".gz\").decode())\n)\n\n\nfor x in txt_ds: break\nx\n\n{'__key__': './dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000',\n '__url__': '../wolnelektury-preproc-wds/wolnelektury-raw-000001.tar.gz',\n 'i': 0,\n 'imax': 115,\n 'tstart': 0.00844,\n 'tend': 10.06,\n 'total_seconds': 2776.057029478458,\n 'lpad': 0,\n 'rpad': 879616,\n 'lpad_s': 0.0,\n 'rpad_s': 19.9459410430839,\n 'samples': tensor([ 1.8147e-05, -4.9754e-06, -1.3190e-05, ..., 0.0000e+00,\n 0.0000e+00, 0.0000e+00]),\n 'sample_rate': 44100,\n 'samples_16k': tensor([ 4.3992e-06, 9.4182e-07, -1.3307e-06, ..., 0.0000e+00,\n 0.0000e+00, 0.0000e+00]),\n 'txt': 'Rozdział 22. Stare mięśca, nowi ludzie. Stierfort i ja zabawiliśmy dwa tygodnie w tamtej okolicy.'}\n\n\n\nfor x in progress_bar(txt_ds, total=10):\n IPython.display.display(IPython.display.Markdown(f\"#### {x['__key__']} chunk {x['i']} of {x['imax']}\"))\n fname = f\"test-{x['i']}.ogg\"\n torchaudio.save(fname, x['samples'][None,:int((x['tend']-x['tstart'])*s['sample_rate'])], s['sample_rate'])\n IPython.display.display(IPython.display.Audio(url=fname, rate=x['sample_rate']))\n IPython.display.display(IPython.display.Markdown(x['txt']))\n\n\n\n\n\n\n \n \n 100.00% [10/10 00:02<00:00]\n \n \n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000 chunk 0 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nRozdział 22. Stare mięśca, nowi ludzie. 
Stierfort i ja zabawiliśmy dwa tygodnie w tamtej okolicy.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_001 chunk 1 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nByliśmy prawie ciągle razem. Czasem tylko rozstawaliśmy się na kilka godzin. Styrford, bowiem, był zawołanym żeglarzem. Ja zaś nie smakowałem zbytnia o w tego rodzaju rozrywkach. To też gdy przyjaciel mój puszczał się w towarzystwie pana PegoTi na morze, pozostawałem zwykle na lądzie. Korzystanie z pokoiku u PegoTi krępowało mnie nieco.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_002 chunk 2 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedząc, jak dalece przez dzień cały jest zajęta do oglądaniem chorego męża, wracałem wcześniej bieczorem, gdy Steelfort, będąc panem swego czasu, niczym się nie krempował. To wiedziałem się też, że gdy już spał w najlepsze, on podejmował rybaków w ulubionej winiarni pana PegoT pod dobrą chęcią, lub wyrybaczkim odzieniu spędzał całe księżycowe nocy na morzu.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_003 chunk 3 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedziałem, że jego żywa gorąca natura potrzebuje rozrywek i niebezpieczeństw i postępowanie jego wcale mnie nie dziwiło. Rostawaliśmy się i z tego jeszcze powodu, że Stirforta nie mogły pociągać tak jak mnie wycieczki do Blanderstone. To też czasem, że gnaliśmy się po wczesnym śniadaniu, a schodzi i dopiero późno na obiad. 
Nie miałem pojęcia co robił, czym się wówczas zajmował.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_004 chunk 4 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedziałem tylko, że znany był i lubiany przez wszystkich, a posiadał darż, szczególny wynajdwanie rozrywek i zajęcia nawet tam, gdzie inny na jego miejscu nic będzie nie znalazł. Co do mnie przebiegając drogę do Blanderstom, przeżywałem w pamięci każdy dzień z przeszłości i to mi wypełniało myśl i serce. Przypominało mi się każde niegdyś tu przeżyte wrażenie.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_005 chunk 5 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nnadmogiło pod drzewami, gdzie spoczywali moi rodzice, zamienioną przez pegoty w kwietnik, na którą gdybyła tylko jeszcze miejscem wiecznego z poczynku megajca, spoglądałem z takim żalem, i którą widział otwartą na przyjęcie z włog, mojej pięknej, kochanej matki jej dzieciątka, długie, spędzałem godziny. Lężała ona na uboczu, wrogu cmentarze.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_006 chunk 6 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nPrzechodząc z drogą, czytać mogłem wypisane na kamieniu nazwiska, a dzwonkościelny zdawał się być głosem pożegnania. W godzinach tych, myśląc o nich.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_007 chunk 7 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nMyślałem zarazem o tym zawsze, jakie miejsce zajmę w życiu, jakich wielkich lub dobrych dokonam czynów. Ech okroków moich nie odbijało nud innych, te tylko, jak gdybym krocząc, ubokużyjącej jeszcze matki, nad powietrzne budował zamki. 
Stary dom nasz zmienił się niedopoznania.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_008 chunk 8 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nZnikły bez śladu, powiżone przez wrony dawno opuszczone gniazda, adrzewa, strzyżone i ścinane utraciły, dawny kształt, ogród zdziczał, połowa okien była zabita. Dom zamieszkany został przez jakiegoś chorego umysłowo-gentelmena ich, tych, co go doglądali.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_009 chunk 9 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nChory przesiadawał zwykle w okniem, i niech ktoś pokójku i spoglądał na cmentarz. Ciekawy byłem, czy też jego myśli, kreślą te same obrazy, co moje, gdy w czeznym rankiem, w nocnej koszulce wyglądałem tym okienkiem, witając pasące się o wschodzie słońca trzody. Dawni nasi sąsiedzi, Państwu Grraper,\n\n\n\nfor x in progress_bar(txt_ds, total=10):\n IPython.display.display(IPython.display.Markdown(f\"#### {x['__key__']} chunk {x['i']} of {x['imax']}\"))\n fname = f\"test-{x['i']}.ogg\"\n torchaudio.save(fname, x['samples'][None,:int((x['tend']-x['tstart'])*s['sample_rate'])], s['sample_rate'])\n IPython.display.display(IPython.display.Audio(url=fname, rate=x['sample_rate']))\n IPython.display.display(IPython.display.Markdown(x['txt']))\n\n\n\n\n\n\n \n \n 100.00% [10/10 00:02<00:00]\n \n \n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_000 chunk 0 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nRozdział dwudziesty drugi Stare miejsca, nowi ludzie Styrford i ja zabawiliśmy dwa tygodnie w tamtej okolicy.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_001 chunk 1 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nByliśmy prawie ciągle razem. 
Czasem tylko rozstawaliśmy się na kilka godzin. Styrford bowiem był zawołanym żeglarzem, ja zaś nie smakowałem zbytnio w tego rodzaju rozrywkach. Toteż gdy przyjaciel mój puszczał się w towarzystwie pana Pegoty na morze, pozostawałem zwykle na lądzie. Korzystanie z pokoiku u Pegoty krępowało mnie nieco.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_002 chunk 2 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedząc, jak dalece przez dzień cały jest zajęta doglądaniem chorego męża, wracałem wcześniej wieczorem, gdy Stilford, będąc panem swego czasu, niczym się nie krępował. Dowiedziałem się też, że gdybym już spał w najlepsze, on podejmował rybaków w ulubionej winiarni pana Pegoty pod dobrą chęcią lub w rybackim odzieniu spędzał całe księżycowe noce na morzu.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_003 chunk 3 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedziałem, że jego żywa, gorąca natura potrzebuje rozrywek i niebezpieczeństw i postępowanie jego wcale mnie nie dziwiło. Rozstawaliśmy się i z tego jeszcze powodu, że z Tyrforda nie mogły pociągać, tak jak mnie, wycieczki do Blunderstown. To też czasem żegnaliśmy się po wczesnym śniadaniu, a schodzili dopiero późno na obiad. Nie miałem pojęcia, co robił, czym się wówczas zajmował.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_004 chunk 4 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nWiedziałem tylko, że znany był i lubiany przez wszystkich, a posiadał dar szczególny wynajdywanie rozrywek i zajęcia nawet tam, gdzie inny na jego miejscu nic by nie znalazł. Co do mnie, przebiegając drogę do Blunderstone, przeżywałem w pamięci każdy dzień z przeszłości i to mi wypełniało myśl i serce. 
Przypominało mi się każde niegdyś tu przeżyte wrażenie.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_005 chunk 5 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nNad mogiłą pod drzewami, gdzie spoczywali moi rodzice, zamienioną przez pegot i w kwietnik, na którą, gdy była tylko jeszcze miejscem wiecznego spoczynku mego ojca, spoglądałem z takim żalem i którą widział otwartą na przyjęcie zwłok mojej pięknej, kochanej matki i jej dzieciątka. Długie spędzałem godziny. Leżała ona na uboczu, w rogu cmentarza.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_006 chunk 6 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nPrzechodząc drogą, czytać mogłem wypisane na kamieniu nazwiska, a dzwoń kościelny zdawał się być głosem pożegnania. W godzinach tych, myśląc o nich…\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_007 chunk 7 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nMyślałem zarazem o tym zawsze, jakie miejsce zajmę w życiu, jakich wielkich lub dobrych dokonam czynów. Echo kroków moich nie odbijało nut innych, te tylko, jak gdybym krocząc u boku żyjącej jeszcze matki nadpowietrzne budował zamki. Stary dom nasz zmienił się nie do poznania.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_008 chunk 8 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nZnikły bez śladu powichrzone przez wrony dawno opuszczone gniazda, a drzewa strzyżone i ścinane utraciły dawny kształt. Ogród zdziczał. Połowa okien była zabita. 
Dom zamieszkany został przez jakiegoś chorego umysłowo dżentelmena i tych, co go doglądali.\n\n\n./dickens-dawid-copperfield-t1/dickens-dawid-copperfield-t1_022_tom-i-rozdzial-xxii_009 chunk 9 of 115\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nChory przesiadywał zwykle w oknie mego niegdyś pokoiku i spoglądał na cmentarz. Ciekawy byłem, czy też jego myśli kreślą te same obrazy co moje, gdy wczesnym rankiem w nocnej koszulce wyglądałem tym okienkiem, witając pasące się o wschodzie słońca trzody. Dawni nasi sąsiedzi, państwo Graper." + }, + { + "objectID": "2A. Whisper quantization dataset preparation.html#batch-processing", + "href": "2A. Whisper quantization dataset preparation.html#batch-processing", + "title": "WhisperSpeech", + "section": "Batch processing", + "text": "Batch processing\nLet’s put everything above together." + }, + { + "objectID": "3c. s2a acoustic tokens preparation.html", + "href": "3c. s2a acoustic tokens preparation.html", + "title": "S2A dataset preparation", + "section": "", + "text": "Automatic pdb calling has been turned ON\n\n\n\nprepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=8)\n\n/opt/conda/lib/python3.10/site-packages/torch/nn/utils/weight_norm.py:30: UserWarning: torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.\n warnings.warn(\"torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.\")\n\n\nBenchmarking run of 1024 samples (128 batches)\n\n\n\n\n\n\n\n \n \n 100.00% [128/128 00:22<00:00]\n \n \n\n\n\nprepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=4)\n\nBenchmarking run of 1024 samples (256 batches)\n\n\n\n\n\n\n\n \n \n 100.00% [256/256 00:23<00:00]\n \n \n\n\n\nprepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', batch_size=4)\n\n\n\n\n\n\n \n \n 100.00% [2769/2769 04:09<00:00]\n \n 
\n\n\n\nprepare_atoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=2)\n\nBenchmarking run of 1024 samples (512 batches)\n\n\n\n\n\n\n\n \n \n 100.00% [512/512 00:31<00:00]" + }, + { + "objectID": "C2. Testing.html", + "href": "C2. Testing.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\ntest_model\n\n test_model (model, ds, bs=1)" + }, + { + "objectID": "index.html", + "href": "index.html", + "title": "WhisperSpeech", + "section": "", + "text": "If you have questions or you want to help you can find us in the #audio-generation channel on the LAION Discord server.\nAn Open Source text-to-speech system built by inverting Whisper. Previously known as spear-tts-pytorch.\nWe want this model to be like Stable Diffusion but for speech – both powerful and easily customizable.\nWe are working only with properly licensed speech recordings and all the code is Open Source so the model will be always safe to use for commercial applications.\nCurrently the models are trained on the English LibreLight dataset. In the next release we want to target multiple languages (Whisper and EnCodec are both multilanguage).\nSample of the synthesized voice:\nhttps://github.com/collabora/WhisperSpeech/assets/107984/aa5a1e7e-dc94-481f-8863-b022c7fd7434", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#progress-update-2024-01-29", + "href": "index.html#progress-update-2024-01-29", + "title": "WhisperSpeech", + "section": "Progress update [2024-01-29]", + "text": "Progress update [2024-01-29]\nWe successfully trained a tiny S2A model on an en+pl+fr dataset and it can do voice cloning in French:\nhttps://github.com/collabora/WhisperSpeech/assets/107984/267f2602-7eec-4646-a43b-059ff91b574e\nhttps://github.com/collabora/WhisperSpeech/assets/107984/fbf08e8e-0f9a-4b0d-ab5e-747ffba2ccb9\nWe were able to do this with frozen semantic tokens that were only trained on English and Polish. 
This supports the idea that we will be able to train a single semantic token model to support all the languages in the world. Quite likely even ones that are not currently well supported by the Whisper model. Stay tuned for more updates on this front. :)", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#progress-update-2024-01-18", + "href": "index.html#progress-update-2024-01-18", + "title": "WhisperSpeech", + "section": "Progress update [2024-01-18]", + "text": "Progress update [2024-01-18]\nWe spend the last week optimizing inference performance. We integrated torch.compile, added kv-caching and tuned some of the layers – we are now working over 12x faster than real-time on a consumer 4090!\nWe can mix languages in a single sentence (here the highlighted English project names are seamlessly mixed into Polish speech):\n\nTo jest pierwszy test wielojęzycznego Whisper Speech modelu zamieniającego tekst na mowę, który Collabora i Laion nauczyli na superkomputerze Jewels.\n\nhttps://github.com/collabora/WhisperSpeech/assets/107984/d7092ef1-9df7-40e3-a07e-fdc7a090ae9e\nWe also added an easy way to test voice-cloning. Here is a sample voice cloned from a famous speech by Winston Churchill (the radio static is a feature, not a bug ;) – it is part of the reference recording):\nhttps://github.com/collabora/WhisperSpeech/assets/107984/bd28110b-31fb-4d61-83f6-c997f560bc26\nYou can test all of these on Colab (we optimized the dependencies so now it takes less than 30 seconds to install). A Huggingface Space is coming soon.", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#progress-update-2024-01-10", + "href": "index.html#progress-update-2024-01-10", + "title": "WhisperSpeech", + "section": "Progress update [2024-01-10]", + "text": "Progress update [2024-01-10]\nWe’ve pushed a new SD S2A model that is a lot faster while still generating high-quality speech. 
We’ve also added an example of voice cloning based on a reference audio file.\nAs always, you can check out our Colab to try it yourself!", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#progress-update-2023-12-10", + "href": "index.html#progress-update-2023-12-10", + "title": "WhisperSpeech", + "section": "Progress update [2023-12-10]", + "text": "Progress update [2023-12-10]\nAnother trio of models, this time they support multiple languages (English and Polish). Here are two new samples for a sneak peek. You can check out our Colab to try it yourself!\nEnglish speech, female voice (transferred from a Polish language dataset):\nhttps://github.com/collabora/WhisperSpeech/assets/107984/aa5a1e7e-dc94-481f-8863-b022c7fd7434\nA Polish sample, male voice:\nhttps://github.com/collabora/WhisperSpeech/assets/107984/4da14b03-33f9-4e2d-be42-f0fcf1d4a6ec\nOlder progress updates are archived here", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#downloads", + "href": "index.html#downloads", + "title": "WhisperSpeech", + "section": "Downloads", + "text": "Downloads\nWe encourage you to start with the Google Colab link above or run the provided notebook locally. 
If you want to download manually or train the models from scratch then both the WhisperSpeech pre-trained models as well as the converted datasets are available on HuggingFace.", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#roadmap", + "href": "index.html#roadmap", + "title": "WhisperSpeech", + "section": "Roadmap", + "text": "Roadmap\n\nGather a bigger emotive speech dataset\nFigure out a way to condition the generation on emotions and prosody\nCreate a community effort to gather freely licensed speech in multiple languages\nTrain final multi-language models", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#architecture", + "href": "index.html#architecture", + "title": "WhisperSpeech", + "section": "Architecture", + "text": "Architecture\nThe general architecture is similar to AudioLM, SPEAR TTS from Google and MusicGen from Meta. We avoided the NIH syndrome and built it on top of powerful Open Source models: Whisper from OpenAI to generate semantic tokens and perform transcription, EnCodec from Meta for acoustic modeling and Vocos from Charactr Inc as the high-quality vocoder.\nWe gave two presentation diving deeper into WhisperSpeech. 
The first one talks about the challenges of large scale training:\n\n\n\nTricks Learned from Scaling WhisperSpeech Models to 80k+ Hours of Speech - video recording by Jakub Cłapa, Collabora\n\n\nThe other one goes a bit more into the architectural choices we made:\n\n\n\nOpen Source Text-To-Speech Projects: WhisperSpeech - In Depth Discussion\n\n\n\nWhisper for modeling semantic tokens\nWe utilize the OpenAI Whisper encoder block to generate embeddings which we then quantize to get semantic tokens.\nIf the language is already supported by Whisper then this process requires only audio files (without ground truth transcriptions).\n\n\n\nUsing Whisper for semantic token extraction diagram", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#encodec-for-modeling-acoustic-tokens", + "href": "index.html#encodec-for-modeling-acoustic-tokens", + "title": "WhisperSpeech", + "section": "EnCodec for modeling acoustic tokens", + "text": "EnCodec for modeling acoustic tokens\nWe use EnCodec to model the audio waveform. Out of the box it delivers reasonable quality at 1.5kbps and we can bring this to high-quality by using Vocos – a vocoder pretrained on EnCodec tokens.\n\n\n\nEnCodec block diagram", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#appreciation", + "href": "index.html#appreciation", + "title": "WhisperSpeech", + "section": "Appreciation", + "text": "Appreciation\n      \nThis work would not be possible without the generous sponsorships from:\n\nCollabora – code development and model training\nLAION – community building and datasets (special thanks to\nJülich Supercomputing Centre - JUWELS Booster supercomputer\n\nWe gratefully acknowledge the Gauss Centre for Supercomputing e.V. 
(www.gauss-centre.eu) for funding part of this work by providing computing time through the John von Neumann Institute for Computing (NIC) on the GCS Supercomputer JUWELS Booster at Jülich Supercomputing Centre (JSC), with access to compute provided via LAION cooperation on foundation models research.\nWe’d like to also thank individual contributors for their great help in building this model:\n\ninevitable-2031 (qwerty_qwer on Discord) for dataset curation", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#consulting", + "href": "index.html#consulting", + "title": "WhisperSpeech", + "section": "Consulting", + "text": "Consulting\nWe are available to help you with both Open Source and proprietary AI projects. You can reach us via the Collabora website or on Discord ( and )", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "index.html#citations", + "href": "index.html#citations", + "title": "WhisperSpeech", + "section": "Citations", + "text": "Citations\nWe rely on many amazing Open Source projects and research papers:\n@article{SpearTTS,\n title = {Speak, Read and Prompt: High-Fidelity Text-to-Speech with Minimal Supervision},\n url = {https://arxiv.org/abs/2302.03540},\n author = {Kharitonov, Eugene and Vincent, Damien and Borsos, Zalán and Marinier, Raphaël and Girgin, Sertan and Pietquin, Olivier and Sharifi, Matt and Tagliasacchi, Marco and Zeghidour, Neil},\n publisher = {arXiv},\n year = {2023},\n}\n@article{MusicGen,\n title={Simple and Controllable Music Generation}, \n url = {https://arxiv.org/abs/2306.05284},\n author={Jade Copet and Felix Kreuk and Itai Gat and Tal Remez and David Kant and Gabriel Synnaeve and Yossi Adi and Alexandre Défossez},\n publisher={arXiv},\n year={2023},\n}\n@article{Whisper\n title = {Robust Speech Recognition via Large-Scale Weak Supervision},\n url = {https://arxiv.org/abs/2212.04356},\n author = {Radford, Alec and Kim, Jong Wook and Xu, Tao and Brockman, Greg and McLeavey, Christine and 
Sutskever, Ilya},\n publisher = {arXiv},\n year = {2022},\n}\n@article{EnCodec\n title = {High Fidelity Neural Audio Compression},\n url = {https://arxiv.org/abs/2210.13438},\n author = {Défossez, Alexandre and Copet, Jade and Synnaeve, Gabriel and Adi, Yossi},\n publisher = {arXiv},\n year = {2022},\n}\n@article{Vocos\n title={Vocos: Closing the gap between time-domain and Fourier-based neural vocoders for high-quality audio synthesis}, \n url = {https://arxiv.org/abs/2306.00814},\n author={Hubert Siuzdak},\n publisher={arXiv},\n year={2023},\n}", + "crumbs": [ + "WhisperSpeech" + ] + }, + { + "objectID": "B1. Training.html", + "href": "B1. Training.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\nSimpleVisual\n\n SimpleVisual (model, masterbar, total_steps)\n\nInitialize self. See help(type(self)) for accurate signature.\n\nsource\n\n\nvalidate\n\n validate (model, val, half=True, bs=16, drop_last=False, dl_workers=8,\n device='cuda')\n\n\nsource\n\n\ntrain\n\n train (checkpoint_path, model, train, val, half=True, bs=16, lr=0.0001,\n drop_last=False, weight_decay=0.1, warmup_steps=10000, epochs=10,\n clip_gradient_norm=None, dl_workers=8, visual_class=<class\n '__main__.SimpleVisual'>, profiler=None,\n run_valid_every_iters=8000, table_row_every_iters=80000,\n chkpt_every_iters=None, device='cuda', trainable_params=None,\n callback=None, lr_schedule='wsd')" + }, + { + "objectID": "3a. t2s transcripts preparation.html", + "href": "3a. t2s transcripts preparation.html", + "title": "T2S dataset preparation", + "section": "", + "text": "prepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)\n\nLightning automatically upgraded your loaded checkpoint from v1.5.4 to v2.1.0. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint ../../../.cache/torch/whisperx-vad-segmentation.bin`\n\n\nModel was trained with pyannote.audio 0.0.1, yours is 2.1.1. 
Bad things might happen unless you revert pyannote.audio to 0.x.\nModel was trained with torch 1.10.0+cu102, yours is 2.1.0+cu121. Bad things might happen unless you revert torch to 1.x.\nBenchmarking run of 1024 samples (64 batches)\n\n\n\n\n\n\n\n \n \n 100.00% [64/64 00:40<00:00]\n \n \n\n\n\nprepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)\n\nBenchmarking run of 1024 samples (64 batches)\nhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\nTo disable this warning, you can either:\n - Avoid using `tokenizers` before the fork if possible\n - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n\n\n\n\n\n\n\n \n \n 100.00% [64/64 01:33<00:00]\n \n \n\n\n\nprepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', transcription_model='medium', n_samples=1024, batch_size=16)\n\nBenchmarking run of 1024 samples (64 batches)\nhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\nTo disable this warning, you can either:\n - Avoid using `tokenizers` before the fork if possible\n - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n\n\n\n\n\n\n\n \n \n 100.00% [64/64 02:06<00:00]\n \n \n\n\n\nprepare_txt('../wolnelektury-wds2/wolnelektury-audio-000000.tar', transcription_model='medium', n_samples=1024, batch_size=1)\n\nBenchmarking run of 1024 samples (1024 batches)\nhuggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\nTo disable this warning, you can either:\n - Avoid using `tokenizers` before the fork if possible\n - Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n\n\n\n\n\n\n\n \n \n 100.00% [1024/1024 10:01<00:00]" + }, + { + "objectID": "D. 
Common inference utilities.html", + "href": "D. Common inference utilities.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\nget_compute_device\n\n get_compute_device ()" + }, + { + "objectID": "b. languages.html", + "href": "b. languages.html", + "title": "Language codes", + "section": "", + "text": "This language list is comming straigh from openai-whisper. The upstream file is here: https://github.com/openai/whisper/blob/main/whisper/tokenizer.py but we are freezing this to the openai-whisper==20230918 version right now.\n\nsource\n\nto_id\n\n to_id (lang)" + }, + { + "objectID": "1b. voice activity detection.html", + "href": "1b. voice activity detection.html", + "title": "Perform Voice Activity Detection (VAD)", + "section": "", + "text": "from IPython.display import HTML\nimport pylab as plt\nWe use the voice activity detection model from WhisperX (but we don’t use their merging algorithm):\nTest just a few files:\nds = wds.WebDataset('/data2/libritts-r-raw-000000.tar').compose(wds.decode(wds.torch_audio))\nfor x in ds: break\nx\n\n{'__key__': './dev-clean/1272/128104/1272_128104_000001_000000',\n '__url__': '/data2/libritts-r-raw-000000.tar',\n 'normalized.txt': \"A 'JOLLY' ART CRITIC\",\n 'original.txt': \"A 'JOLLY' ART CRITIC\",\n 'wav': (tensor([[ 0.0000, 0.0000, 0.0000, ..., -0.0036, -0.0038, -0.0050]]),\n 24000)}\n# test it locally\ninput:str = 'https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-large-wo6454-flac-000002.tar'\noutput:str = input.rsplit(\"/\", 1)[1].replace('flac', 'vad') + \".gz\"\n\nds = load_dataset(input)\nvad_model = whisperx.vad.load_vad_model('cuda')\n\nwith wds.TarWriter(output) as sink:\n for s in progress_bar(ds, total=10):\n audio, sr = s['audio']\n assert(sr == 16000)\n sink.write({\n \"__key__\": s['__key__'],\n \"vad.npy\": np.array(segment_audio(vad_model, audio), dtype=np.float32)\n })\n \n!ls -lh {output}\n!tar tf {output}\n\nLightning automatically upgraded your 
loaded checkpoint from v1.5.4 to v2.0.2. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint --file ../../../.cache/torch/whisperx-vad-segmentation.bin`\n\n\nModel was trained with pyannote.audio 0.0.1, yours is 2.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.\nModel was trained with torch 1.10.0+cu102, yours is 2.0.1+cu118. Bad things might happen unless you revert torch to 1.x.\n-rw-r--r-- 1 root root 7.5K Sep 21 08:51 librilight-large-wo6454-vad-000002.tar.gz\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_03_molesworth_64kb.vad.npy\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_04_molesworth_64kb.vad.npy\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_08_molesworth_64kb.vad.npy\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_09_molesworth_64kb.vad.npy\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_10_molesworth_64kb.vad.npy\nlarge/10089/five_minutes_stories_1508_librivox_64kb_mp3/5minutesstories_11_molesworth_64kb.vad.npy\nlarge/10089/goodcheerstories_1511_librivox_64kb_mp3/goodcheerstories_13_dickinson_64kb.vad.npy\nlarge/10089/goodcheerstories_1511_librivox_64kb_mp3/goodcheerstories_30_dickinson_64kb.vad.npy\nlarge/10089/mothers_nursery_tales_1512_librivox_64kb_mp3/mothers_nursery_tales_16_pyle_64kb.vad.npy\nlarge/10089/mothers_nursery_tales_1512_librivox_64kb_mp3/mothers_nursery_tales_25_pyle_64kb.vad.npy\n\n\n\n\n\n\n\n \n \n 100.00% [10/10 00:10<00:00]" + }, + { + "objectID": "1b. voice activity detection.html#batch-processing", + "href": "1b. 
voice activity detection.html#batch-processing", + "title": "Perform Voice Activity Detection (VAD)", + "section": "Batch processing", + "text": "Batch processing\nLet’s put everything above together.\n\n# for reference, this was the performance on a single 4090:\nprocess_shard('https://huggingface.co/datasets/collabora/librilight-webdataset/resolve/main/librilight-small-flac-000000.tar')\n\nLightning automatically upgraded your loaded checkpoint from v1.5.4 to v2.0.2. To apply the upgrade to your files permanently, run `python -m pytorch_lightning.utilities.upgrade_checkpoint --file ../../../.cache/torch/whisperx-vad-segmentation.bin`\n\n\nModel was trained with pyannote.audio 0.0.1, yours is 2.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.\nModel was trained with torch 1.10.0+cu102, yours is 2.0.1+cu118. Bad things might happen unless you revert torch to 1.x.\n\n\n\n\n\n\n\n \n \n 100.00% [335/335 03:30<00:00]\n \n \n\n\n\nfor x in wds.WebDataset('/data2/libritts-r-vad-000000.tar').decode(): break\nx['__key__'].split('/')\n\n['.', 'dev-clean', '1272', '128104', '1272_128104_000001_000000']\n\n\n\nplt.hist([x['vad.npy'].shape[0] for x in wds.WebDataset('/data2/libritts-r-vad-000000.tar').decode()])\n\n(array([1.6967e+04, 0.0000e+00, 6.4500e+02, 0.0000e+00, 0.0000e+00,\n 1.0800e+02, 0.0000e+00, 2.5000e+01, 0.0000e+00, 7.0000e+00]),\n array([1. , 1.4, 1.8, 2.2, 2.6, 3. , 3.4, 3.8, 4.2, 4.6, 5. ]),\n <BarContainer object of 10 artists>)" + }, + { + "objectID": "5b. multi-lang text to semantic token modeling.html", + "href": "5b. 
multi-lang text to semantic token modeling.html", + "title": "Text to semantic tokens model", + "section": "", + "text": "from whisperspeech.wer_metrics import *\nfrom whisperspeech.train import *\n\nfrom fastprogress import master_bar\nimport webdataset as wds\n\n\nDataset\n\nsource\n\nload_dataset\n\n load_dataset (txt_shard_spec:str, stoks_shard_dir:str, samples:int,\n txt_kind:str='small.en-txt', vq_codes:int=4096,\n language:str='en', weight:float=1, validation:bool=False,\n exclude_files:str=None, cwd:pathlib.Path=None)\n\n\n\n\n\nType\nDefault\nDetails\n\n\n\n\ntxt_shard_spec\nstr\n\ntranscription webdataset shards\n\n\nstoks_shard_dir\nstr\n\nstoks webdataset base dir\n\n\nsamples\nint\n\nsamples per epoch\n\n\ntxt_kind\nstr\nsmall.en-txt\n\n\n\nvq_codes\nint\n4096\n\n\n\nlanguage\nstr\nen\n\n\n\nweight\nfloat\n1\n\n\n\nvalidation\nbool\nFalse\n\n\n\nexclude_files\nstr\nNone\n\n\n\ncwd\nPath\nNone\n\n\n\n\n\n\n\nAutomatic pdb calling has been turned ON\n\n\n\ntrain_ds = load_dataset('../wolnelektury-wds2/wolnelektury-medium-txt-*.tar.gz', '../wolnelektury-vqv2/', 190000,\n txt_kind='medium-txt', vq_codes=513, language='pl',\n exclude_files='../wolnelektury-wds2/validation-samples')\nval_ds = load_dataset('../wolnelektury-wds2/validation-eqvad.tar.gz', '../wolnelektury-vqv2/', 520,\n txt_kind='medium-txt', vq_codes=513, language='pl', validation=True)\n\n\nfor x in progress_bar(train_ds, total=100): pass\nx\n\n\n\n\n\n\n \n \n 100.00% [100/100 00:06<00:00]\n \n \n\n\n[tensor([[ 0, 80, 114, ..., 0, 0, 0],\n [ 0, 74, 97, ..., 0, 0, 0],\n [ 0, 80, 114, ..., 0, 0, 0],\n ...,\n [ 0, 90, 32, ..., 0, 0, 0],\n [ 0, 78, 105, ..., 0, 0, 0],\n [ 0, 74, 97, ..., 0, 0, 0]]),\n tensor([[ 80, 114, 111, ..., 0, 0, 0],\n [ 74, 97, 99, ..., 0, 0, 0],\n [ 80, 114, 111, ..., 0, 0, 0],\n ...,\n [ 90, 32, 100, ..., 0, 0, 0],\n [ 78, 105, 101, ..., 0, 0, 0],\n [ 74, 97, 32, ..., 0, 0, 0]]),\n array([10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,\n 10, 10, 10, 
10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,\n 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,\n 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10]),\n array([16.87227866, 13.26666667, 10.44474394, 12.30366492, 17.85714286,\n 12.91291291, 17.57731959, 12.59044863, 13.08701657, 9.05923345,\n 6.64893617, 16.04938272, 13.57664234, 16.6958042 , 12.89986092,\n 12.30385164, 13.0044843 , 11.58280922, 6.55940594, 14.94444444,\n 14.01639344, 11.34085213, 14.24632353, 13.95348837, 13.08219178,\n 14.08382066, 17.42424242, 13.91006098, 12.85425101, 14.37296417,\n 13.3640553 , 12.09103841, 12.54098361, 11.59711075, 14.07380608,\n 13.40388007, 14.59537572, 11.70212766, 12.1559633 , 14.36781609,\n 13.86138614, 12.27272727, 14.36915888, 13.57388316, 12.84059946,\n 13.21478382, 11.01123596, 15.40041068, 14.14473684, 10.51401869,\n 11.55172414, 14.90990991, 16.0130719 , 12.80959752, 14.18511066,\n 6.04448743, 11.36 , 15.35087719, 15.41155867, 14.49880668,\n 12.47892074, 12.34375 , 14.04612159, 16.55629139]),\n tensor([[512, 460, 66, ..., 512, 512, 512],\n [512, 336, 452, ..., 116, 116, 116],\n [512, 66, 309, ..., 512, 512, 512],\n ...,\n [512, 336, 253, ..., 512, 512, 512],\n [512, 336, 141, ..., 512, 512, 512],\n [512, 336, 261, ..., 512, 512, 512]]),\n tensor([[460, 66, 337, ..., 512, 512, 512],\n [336, 452, 417, ..., 116, 116, 460],\n [ 66, 309, 58, ..., 512, 512, 512],\n ...,\n [336, 253, 253, ..., 512, 512, 512],\n [336, 141, 248, ..., 512, 512, 512],\n [336, 261, 197, ..., 512, 512, 512]])]\n\n\n\n\n\nModeling\n\nsource\n\nTunables\n\n Tunables (init_std:float=1, embeddings_std:float=0.01,\n embeddings_lr_scale:float=5,\n embedding_projector_lr_scale:float=2.5, output_mult:float=0.35,\n query_mult:float=1, encoder_depth_ratio:float=0.25,\n causal_encoder:bool=True, eot_dropout_p:float=0.5,\n cps_input:bool=True, cps_bins:int=32, lr0:float=0.0015,\n clip_gradient_norm:float=0.2, weight_decay:float=0.1,\n warmup_steps:float=4000, 
random:bool=False)\n\n\nsource\n\n\nrand\n\n rand (start, end)\n\n\nsource\n\n\nT2SEmbedding\n\n T2SEmbedding (length=1500, codes=1024, width=384, pos_embs=None,\n stoks_width=384)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nEncoder\n\n Encoder (depth=6, width=384, n_head=6, length=1500, codes=1024,\n emb_width=384, ffn_mult=4, pos_embs=None,\n tunables=Tunables(init_std=1, embeddings_std=0.01,\n embeddings_lr_scale=5, embedding_projector_lr_scale=2.5,\n output_mult=0.35, query_mult=1, encoder_depth_ratio=0.25,\n causal_encoder=True, eot_dropout_p=0.5, cps_input=True,\n cps_bins=32, lr0=0.0015, clip_gradient_norm=0.2,\n weight_decay=0.1, warmup_steps=4000, random=False))\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. 
You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nTSARTransformer\n\n TSARTransformer (depth=6, n_head=6, head_width=64, ffn_mult=4,\n ttoks_len=200, ttoks_codes=256, ttoks_width=None,\n stoks_len=1500, stoks_codes=1024, stoks_width=None,\n tunables=Tunables(init_std=1, embeddings_std=0.01,\n embeddings_lr_scale=5, embedding_projector_lr_scale=2.5,\n output_mult=0.35, query_mult=1,\n encoder_depth_ratio=0.25, causal_encoder=True,\n eot_dropout_p=0.5, cps_input=True, cps_bins=32,\n lr0=0.0015, clip_gradient_norm=0.2, weight_decay=0.1,\n warmup_steps=4000, random=False))\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. 
note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nmake_model\n\n make_model (size:str, frozen_embeddings_model:str=None,\n tunables:__main__.Tunables=Tunables(init_std=1,\n embeddings_std=0.01, embeddings_lr_scale=5,\n embedding_projector_lr_scale=2.5, output_mult=0.35,\n query_mult=1, encoder_depth_ratio=0.25, causal_encoder=True,\n eot_dropout_p=0.5, cps_input=True, cps_bins=32, lr0=0.0015,\n clip_gradient_norm=0.2, weight_decay=0.1, warmup_steps=4000,\n random=False), dataset:torch.utils.data.dataset.Dataset=None)\n\n\n# baseline\nmodel = make_model('micro', dataset=train_ds, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',\n tunables=Tunables()).cuda()\ntrain(\"tsar-wx\", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=4,\n warmup_steps=model.tunables.warmup_steps, weight_decay=model.tunables.weight_decay, clip_gradient_norm=model.tunables.clip_gradient_norm,\n table_row_every_iters=100000, run_valid_every_iters=10000)\n\n\n\n\n\n\n\n\n\n\n \n \n 100.00% [4/4 07:59<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ntime\n\n\n\n\n100000\n2.62064\n2.48393\n01:06\n\n\n200000\n1.79376\n1.77248\n02:11\n\n\n300000\n1.69666\n1.66202\n03:11\n\n\n400000\n1.73755\n1.60740\n04:17\n\n\n500000\n1.75108\n1.56827\n05:17\n\n\n600000\n1.59873\n1.53394\n06:23\n\n\n700000\n1.50289\n1.49515\n07:23\n\n\n759936\n1.52261\n1.47473\n08:00\n\n\n\n\n\n \n \n 100.00% [5937/5937 02:00<00:00 #189984/189984 loss: 1.523 / 1.475]\n \n \n\n\n\n\n\n\n\n\n\n\n\nmodel.save_model('t2s-micro.model')\n\n\n# no encoder LM loss, trains visibly slower\nmodel = make_model('micro', dataset=train_ds, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',\n tunables=Tunables(causal_encoder=False)).cuda()\ntrain(\"tsar-wx\", model, train_ds, val_ds, half=True, bs=32, 
lr=model.tunables.lr0, epochs=4,\n warmup_steps=1500, weight_decay=model.tunables.weight_decay, clip_gradient_norm=model.tunables.clip_gradient_norm,\n table_row_every_iters=100000, run_valid_every_iters=10000)\n\n\n\n\n\n\n\n\n\n\n \n \n 100.00% [4/4 07:57<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ntime\n\n\n\n\n100000\n2.44452\n2.38181\n01:04\n\n\n200000\n2.33279\n2.19010\n02:11\n\n\n300000\n1.83019\n1.82918\n03:12\n\n\n400000\n1.74988\n1.73074\n04:16\n\n\n500000\n1.58686\n1.67560\n05:15\n\n\n600000\n1.54544\n1.62922\n06:21\n\n\n700000\n1.68379\n1.59513\n07:21\n\n\n759936\n1.61915\n1.57619\n07:57\n\n\n\n\n\n \n \n 100.00% [5937/5937 01:59<00:00 #189984/189984 loss: 1.619 / 1.576]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.\n warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)" + }, + { + "objectID": "dataset preparation.html", + "href": "dataset preparation.html", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "", + "text": "WhisperSpeech is trained on heavily preprocessed speech data generated from several models:", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#who-is-who-a-high-level-overview", + "href": "dataset preparation.html#who-is-who-a-high-level-overview", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Who is who? A high-level overview", + "text": "Who is who? 
A high-level overview\nTo get these 3 data representations we have to run the audio data through several models. The first two steps are always the same, the rest depend on the model we want to run.\n\nWe start by downloading the speech audio files into a sharded webdataset (e.g. A3. Download Project Guttenberg audiobooks).\nWe released webdatasetified versions of two important public domain speech datasets – LibriLight and Project Gutenberg Audiobooks.\nAll subsequent steps rely on voice activity detection (VAD) and diarization so we always generate segment lists and extract speaker embeddings for all audio files (see 1B. Voice activity detection and 2A. Speaker Embeddings for source code).\nThe results of this step were also released on Hugging Face – LibriLight and Project Gutenberg Audiobooks.\n\nThe next steps depend on which model we want to train or fine-tune.\n\nTo re-train the quantized Whisper model we need to transcribe the audio with base.en (2A. Whisper quantization dataset preparation). A model pretrained on 60k hours of LibriLight is available from Hugging Face whisper-vq-stoks-v2.model.\nTo train the text to semantic token model we need to transcribe the audio with Whisper small.en and extract the semantic tokens (5A. T2S dataset preparation).\nTo train the semantic to acoustic model we need to extract the semantic tokens and compress the audio with Encodec for the semantic to acoustic model (4A. S2A dataset preparation).\n\nThese three steps are all independent since they require different chunking of speech data. For quantizing Whisper and S2A training we greedily merge the VAD segments from the same speaker into (at most) 30 second chunks to improve training performance (more uniform chunks mean less computation time is spent on padding). For T2S we randomly truncate when merging the VAD segments so the model also learns how to work with shorter texts. The code to perform this is in 1C. VAD merging.", + "crumbs": [ + "I can has speech? 
What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#tldr-example-give-me-the-codes", + "href": "dataset preparation.html#tldr-example-give-me-the-codes", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "TL;DR example – give me the codes!", + "text": "TL;DR example – give me the codes!\nIn this example we will convert a single split from the Multilingual Libri Speech dataset.\n\nPrepare the webdataset shards\nThe first, most time-consuming, step is to convert the data from it’s original form into the webdataset format. If you want to skip this section and still follow along, the results can be downloaded from Hugging Face at datasets/collabora/multilingual-librispeech-webdataset.\nFirst we need tarp which is a tool that helps create and manipulate the webdataset tar files more effectively. You can check out more about it in the official tarp README\ngo install -v github.com/collabora/tarp/tarp@latest\nAfterwards, we download and unpack the original dataset files:\naria2c -x10 https://dl.fbaipublicfiles.com/mls/mls_french_opus.tar.gz\ntar -xf mls_french_opus.tar.gz\nNext, we’ll need to convert each line in the transcripts.txt file:\n10065_10039_000000 ses vêtements devinrent tout brillants de lumière et blancs comme la neige en sorte qu'il n'y a point de foulon sur la terre qui puisse en faire d'aussi blancs\ninto a tarp script:\ntrain/10065_10039_000000.opus file:mls_french_opus/train/audio/10065/10039/10065_10039_000000.opus\ntrain/10065_10039_000000.txt text:ses vêtements devinrent tout brillants de lumière et blancs comme la neige en sorte qu'il n'y a point de foulon sur la terre qui puisse en faire d'aussi blancs\nWe can achieve this using a short Python script (saved as make-script.py):\nimport sys\n\nfname = sys.argv[1]\ndir, split, _ = fname.rsplit(\"/\", 2)\n\nfor ln in open(fname):\n id, txt = ln.split(\"\\t\")\n a,b,c = id.split(\"_\")\n txt = txt.replace(\"\\n\", \"\")\n 
print(f\"\"\"{split}/{id}.opus file:{dir}/{split}/audio/{a}/{b}/{id}.opus\n{split}/{id}.txt text:{txt}\"\"\")\nOnce we have this, we can run the conversion process. The python script outputs data sample descriptions which are fed to tarp create that archives them into a tar stream (a bit similar to tar -T -). The tarp split will then cut the incoming stream into 2GB shards and save them to separate files, making sure to split on sample boundaries.\nThe 2GB size was chosen as a good compromise between the shard count and shard transcription time for mp3/opus files with mutlilingual speech. For LibriLight (English compressed with FLAC) the magic number was 5GB because we FLAC compresses less and we can also use a smaller model for transcribing English speech.\npython3 make-script.py mls_french_opus/train/transcripts.txt \\\n | /root/go/bin/tarp create -o - - \\\n | /root/go/bin/tarp split -s 2e9 -o 'mls_french_train-audio-%06d.tar' -\nWe’ll have to repeat the same command two times replacing train with test and dev and afterwards we can upload everything to Hugging Face:\nhuggingface-cli login\nhuggingface-cli upload --repo-type dataset collabora/multilingual-librispeech-webdataset .\n\n\nProcess the shards on a single GPU machine\nWe do the sharding mainly to be able to effectively process data on many GPUs but for the sake of simplicity we will use a single GPU here. The process stays the same, but different tools would be used to schedule the jobs. 
For reference, below the commands, we have specified their approximate runtimes on a RTX 4090 for the French subset of MLS.\nPerform voice activity detection:\nparallel --eta -j3 python -m whisperspeech.vad {} ::: ./*.tar\n# 50min\nExtract speaker embeddings for each fragment:\nparallel --eta -j2 python -m whisperspeech.extract_spk_emb --batch_size 16 {} ::: ./*.tar\n# 1h 10min\nWe perform VAD segment merging (we do it as a separate step here to remove all randomness and get reproducibility for later steps):\nparallel --eta -j16 python -m whisperspeech.vad_merge --eqvad {} ::: *.tar\nparallel --eta -j16 python -m whisperspeech.vad_merge {} ::: *.tar\nWith that covered we can start the heavy lifting with the transcripts:\nparallel --eta -j1 python -m whisperspeech.prepare_t2s_txts --transcription_model medium --language fr --batch_size 32 {} ::: *.tar\n# 6h 48min\nAfterwards comes Encodec compression:\nparallel --eta -j2 python -m whisperspeech.prepare_s2a_atoks --batch_size 4 {} ::: *.tar\n# 2h\nNow we can extract the semantic tokens for both the T2S (eqvad) and S2A (maxvad) training:\nparallel --eta -j1 python -m whisperspeech.extract_stoks --batch_size 16 --vq_model ../nbs/vqmodel-medium-en+pl-512c-dim64.model {} ::: *.tar\nparallel --eta -j1 python -m whisperspeech.extract_stoks --kind eqvad --batch_size 16 --vq_model ../nbs/vqmodel-medium-en+pl-512c-dim64.model {} ::: *.tar\n# 3h 45min\n\n\nSplitting out the validation set(s)\nAfter we have all the samples we may want to extract some validation sets. 
There are many ways to do it but here we’ll manually choose some speakers we’ll later skip completely during training.\nWe start by dumping all the sample ids:\nparallel tar tf {} ::: stoks/*-atoks-3kbps-*.tar.gz | sed -e 's/\\.atoks\\.npy//' > all-samples-maxvad\nparallel tar tf {} ::: stoks/*-small.en-txt-*.tar.gz | sed -e 's/\\.txt//' > all-samples-eqvad\nwc -l all-samples-maxvad\nBecause the sample ids (which are the original file paths) have speaker ids in them we can make a quick histogram:\n< all-samples-maxvad awk -F_ '{ print $1; }'|sort|uniq -c|sort -n|less\nFrom the result we can copy and paste 10 speaker ids of around 50 samples each to get 512 validation samples. We’ll exclude them from the training set because we want to validate on unseen speakers. We have to repeat this process for both splits (maxvad and eqvad since they have’ll different sample counts and ids):\n< all-samples-maxvad grep 'train/1579\\|train/2033\\|train/3182\\|train/12981\\|train/2284\\|train/2297\\|train/6348\\|train/7200\\|train/7679\\|train/1989' >\nunseen-speakers-maxvad\n< all-samples-eq grep 'train/1579\\|train/2033\\|train/3182\\|train/12981\\|train/2284\\|train/2297\\|train/6348\\|train/7200\\|train/7679\\|train/1989' > unseen-speakers-eqvad\nOnce we have all the ids we can rescan the whole dataset once and split out the validation samples to separate webdataset shards to make validation fast:\npython -m whisperspeech.split_out_val_datasets *-atoks-* unseen-speakers-maxvad\npython -m whisperspeech.split_out_val_datasets '*-txt-*' unseen-speakers-eqvad\ncd stoks && python -m whisperspeech.split_out_val_datasets '*-maxvad-stoks-*' ../unseen-speakers-maxvad\ncd stoks && python -m whisperspeech.split_out_val_datasets '*-eqvad-stoks-*' ../unseen-speakers-eqvad\nWe can use wc -l all-samples-maxvad to find out how many samples we have.\n\n\nCreating the dataset configuration files for training\nFinally we create the configuration files for the training script:\ncat > 
mls-fr-t2s-train.dataset <<EOF\nmultilingual-librispeech-webdataset/*-medium-txt-*.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 390203 --txt_kind='medium-txt' --language=fr --exclude_files multilingual-librispeech-webdataset/unseen-speakers-eqvad\nEOF\ncat > mls-fr-s2a-train.dataset <<EOF\nmultilingual-librispeech-webdataset/*-atoks-*.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 338362 --language=fr --exclude_files multilingual-librispeech-webdataset/unseen-speakers-maxvad\nEOF\ncat > mls-fr-s2a-val-unseen-speakers.dataset <<EOF\nmultilingual-librispeech-webdataset/unseen-speakers-maxvad.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 512 --language fr\nEOF\ncat > mls-fr-t2s-val-unseen-speakers.dataset <<EOF\nmultilingual-librispeech-webdataset/unseen-speakers-eqvad.tar.gz multilingual-librispeech-webdataset/vq-en+pl/ 512 --txt_kind 'medium-txt' --language fr\nEOF", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#why-webdataset", + "href": "dataset preparation.html#why-webdataset", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Why WebDataset?", + "text": "Why WebDataset?\nAll WhisperSpeech training and preproc code got reorganized around webdatasets. Webdatasets are just simple tar files that store all our data samples (files) but they are great for working with very large datasets. Inside these tar files we can store multiple files per sample in any format we want (e.g. the speech mp3/flac/wav files, the text transcripts, tokens in numpy arrays). 
For example from the data used to train the S2A model we have:\n$ tar tf whisperspeech-s2a-512c-dim64/librilight-small-000.tar.gz |head -6\nsmall/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_021.atoks.npy\nsmall/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_021.stoks.npy\nsmall/28/amateur_cracksman_librivox_64kb_mp3/amateur_cracksman_04_hornung_64kb_004.atoks.npy\nsmall/28/amateur_cracksman_librivox_64kb_mp3/amateur_cracksman_04_hornung_64kb_004.stoks.npy\nsmall/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_052.atoks.npy\nsmall/1874/shortlifelincoln_0809_librivox_64kb_mp3/shortlifeoflincoln_10_nicolay_64kb_052.stoks.npy\nThe name of the file is the same as the file name of the original dataset sample and the extensions tell us what kind of value they hold and in which format.\nFurthermore we can split the whole dataset into fixed-size tar files called shards and load them on demand without unpacking. It turns out that this is exactly what we need for both AI training and data preprocessing:\n\nfor training we start a multiple CPU workers in parallel, open different shards in each, stream the data sequentially from disk (fast), decode it independently and them shuffle the samples we receive from each worker to create varied training batches\nfor preprocessing we independently send each shard to a worker and save all the results in a new webdataset shard\n\nReading samples sequentialy allows us to simply compress the whole file with gzip and offers best performance even on spinning or network disks.\n\n\n\n\n\n\nNote\n\n\n\nFor the Juwels cluster there is another crucial benefit. There is a pretty low limit on the total number of files on network disks (inodes to be precise) so there is a strong preference to keep data in a few large files. 
The network file system performance is also better if we don’t have to open too many files.\n\n\nKeeping each shard around 5GB seems to work great (the processed shards will likely be a lot smaller but it’s a lot easier to keep a 1-to-1 shard mapping). For the almost 4TB LibriLight dataset this translates to 625 files.\nWe found it quite useful to also keep all the data in some splits. This is data dependent but for LibriLight we followed the original split (small, medium, large) but also extracted the 6454 speaker from the large split because it is was the largest single speaker dataset and it allowed us to use it during development without downloading the full 4TB.\n\n\n\n\n\n\nCaution\n\n\n\nThe sample file names should not have dots in them, otherwise the WebDataset code gets confused which files go together into one sample. This can be worked around later but it’s easiest if we just do .replace('.', '_') when storing the initial raw dataset.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#joins-on-webdatasets", + "href": "dataset preparation.html#joins-on-webdatasets", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Joins on WebDatasets", + "text": "Joins on WebDatasets\nOne novel functionality we developed for this project is the capability to join multiple preprocessed webdatasets. 
This mechanism relies on keeping a constant ordering of samples in a shard and ensuring 1-to-1 correspondence between the input and output shards during preprocessing.\nExample usage:\nds = wds.WebDataset([str(x) for x in Path('librilight/').glob('*.tar')]).compose( # load all audio shards\n wds.decode(wds.torch_audio), # decode the audio data\n vq_stoks.merge_in( # merge another WebDataset\n # for each audio (`raw`) shard, find the path and name of a corresponding `vad` shard\n vq_stoks.derived_dataset('librilight-processed/', 'vad')\n ),\n)\nderived_dataset creates for us a helper function that returns an opened derived dataset given the original shard file name:\ndef derived_dataset(path, kind):\n def deriver(url):\n url = str(Path(path)/(Path(url).name.replace(\"raw\", kind) + \".gz\"))\n return wds.WebDataset(wds.SimpleShardList([url])).decode()\n return deriver\nThis feature is experimental and the API may change as we develop more experience with this merging style.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#examples-of-preprocessing-runs", + "href": "dataset preparation.html#examples-of-preprocessing-runs", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Examples of preprocessing runs", + "text": "Examples of preprocessing runs\nAn example of running a preprocessing step locally on a single file:\nmkdir -p guttenberg-preproc && cd guttenberg-preproc\npython -m whisperspeech.vad ../guttenberg-audiobooks/guttenberg-audiobooks-raw-000010.tar\nThis will generate a file named guttenberg-audiobooks-vad-000000.tar.gz in the guttenberg-preproc directory.\nOn the cluster we can run multiple jobs in parallel (24 in this case), each processing one input shard. 
Since each job is pretty short (around 30 minutes) it’s easier for the scheduler to squeeze these between longer and higher-priority jobs.\nmkdir -p whisperspeech-s2a-512c-dim64 && cd whisperspeech-s2a-512c-dim64\nfind ../librilight/ -name 'librilight-small-*.tar'| ~/clapa1/run-batch 24 \\\n 'python -m whisperspeech.prepare_s2a_dataset $FILE ../librilight-preproc\n --vq_model ~/clapa1/scratch/vqmodel-512c-dim64-4e-hyptuned-32gpu.model\n --batch_size 8'\nThe prepare_s2a_dataset script is taking raw audio data from the input file, automatically finding corresponding shards with VAD results in ../librilight-preproc and writing the results to the whisperspeech-s2a-512c-dim64 directory.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#voice-activity-detection", + "href": "dataset preparation.html#voice-activity-detection", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Voice activity detection", + "text": "Voice activity detection\nCode: 1B. Voice activity detection\nRight now we are using the VAD model from WhisperX that is enough to avoid cutting audio in the middle of a word which would hurt automated transcriptions quite a lot. For more fancy datasets with multiple speakers we could use pyannote for it’s detection of multiple people speaking at once and diarization capability.\nWe later merge the VAD segments into longer chunks for more efficient training (less padding == higher efficiency). The code and histogram plots can be found in 2A. Whisper quantization dataset preparation", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#transcription", + "href": "dataset preparation.html#transcription", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Transcription", + "text": "Transcription\nCode: 5A. 
T2S dataset preparation\nFor training the TTS model (T2S) we are using running batches of chunked speech segments though FasterWhisper. We use the small.en model since there seems to be little benefit from using the larger models on English speech. For multilingual TTS we would probably want to switch to large-v2.\n\n\n\n\n\n\nNote\n\n\n\nRight now we extract both semantic tokens and transcriptions in one go. Doing the transcriptions is very time consuming are the result is unlikely to change. OTOH we may want to regenerate the semantic tokens if we train different quantized Whisper models. Because of that we may want to split this into two separate steps and only merge the results just before we generate the training dataset.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#acoustic-token-extraction", + "href": "dataset preparation.html#acoustic-token-extraction", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Acoustic token extraction", + "text": "Acoustic token extraction\nCode: 4A. S2A dataset preparation\nThis is basically the same as T2S above but with Encodec instead of Whisper.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "dataset preparation.html#trainvalidation-split", + "href": "dataset preparation.html#trainvalidation-split", + "title": "I can has speech? What data WhisperSpeech needs?", + "section": "Train/validation split", + "text": "Train/validation split\nWe create validation splits differently for each dataset. For example for LibriLight we use the speaker labels to create a common and unseen speakers splits. Once we have a list of samples we want to use we extract them from the full dataset into a new shard while keeping a list of IDs to skip during training. This way we avoid copying the training samples.\nThis has the downside of delaying all shuffling until training. 
This is especially problematic for smaller datasets with not enough shards since multiple workers may read the same shard and initially (before the shuffling buffer is filled) deliver the same samples multiple times. This causes overfitting. This is not a problem early in training (the model is too random to overfit) and we make sure we don’t reset the dataloaders between epochs but it is causing issues when resuming training from a checkpoint. The workaround is to preload the shuffling bufferwith a lot of samples (.shuffle(initial=20000)). Unfortunately it has the downside of putting a lot of load on the filesystem and adding a significant delay before training can start.", + "crumbs": [ + "I can has speech? What data WhisperSpeech needs?" + ] + }, + { + "objectID": "1. acoustic token extraction.html", + "href": "1. acoustic token extraction.html", + "title": "Acoustic token extraction", + "section": "", + "text": "# unpacked small.tar should go here:\ndatadir = Path('/mnt/')\n# you can download it downloaded from\n# https://github.com/facebookresearch/libri-light/blob/main/data_preparation/README.md\n\n\nsource\n\nload\n\n load (fname, newsr=24000)\n\nLoad an audio file to the GPU and resample to newsr.\n\nsource\n\n\nload_model\n\n load_model ()\n\nLoad the pretrained EnCodec model\n\nsource\n\n\nextract_Atoks\n\n extract_Atoks (model, audio)\n\nExtract EnCodec tokens for the given audio tensor (or file path) using the given model (see load_model).\n\nsource\n\n\nextract_acoustic\n\n extract_acoustic (srcdir:pathlib.Path, outdir:pathlib.Path)\n\nConvert audio files to .encodec files with tensors of tokens\n\n\n\n\nType\nDetails\n\n\n\n\nsrcdir\nPath\nsource dir, should contain *.flac files\n\n\noutdir\nPath\noutput dir, will get the *.encodec files\n\n\n\n\n# process all files for speaker 1401\nmodel = load_model()\nextract_acoustic(model, datadir/'small/1401', datadir/'acoustic-1401')\n\n\n\n\n\n\n \n \n 100.00% [131/131 05:38<00:00]\n \n \n\n\n\n!du -hs 
{datadir}/acoustic-1401/\n\n78M /mnt/acoustic-1401/" + }, + { + "objectID": "3D. Split out validation.html", + "href": "3D. Split out validation.html", + "title": "WhisperSpeech", + "section": "", + "text": "ds = wds.WebDataset(utils.shard_glob('../wolnelektury-wds2/wolnelektury-eqvad-000000.tar.gz'))\n\n\nfor s in ds: break\ns.keys()\n\ndict_keys(['__key__', '__url__', 'spk_emb.npy', 'vad.npy'])\n\n\n\nsplit_dataset('../wolnelektury-wds2/wolnelektury-eqvad-stoks-*.tar.gz', '../wolnelektury-wds2/validation-eqvad')\n\n['../wolnelektury-wds2/wolnelektury-eqvad-stoks-000014.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000008.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000010.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000004.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000011.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000003.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000002.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000007.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000013.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000005.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000009.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000000.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000006.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000012.tar.gz', '../wolnelektury-wds2/wolnelektury-eqvad-stoks-000001.tar.gz']\n{'../wolnelektury-wds2/validation-eqvad': <webdataset.writer.TarWriter object>}\nLooking for 520 samples...\n\n\n\n\n\n\n\n \n \n 100.00% [520/520 00:01<00:00]\n \n \n\n\n\nsplit_dataset('whisperspeech-s2a-512c-tts-r/*.tar.gz', 's2a-dim64-ttsr-valfix')\n\n# writing s2a-dim64-ttsr-valfix/train-000000.tar.gz 0 0.0 GB 00350 00:13<00:00]\n# writing s2a-dim64-ttsr-valfix/train-000001.tar.gz 400 0.0 GB 4000<04:44]]\n# writing s2a-dim64-ttsr-valfix/train-000002.tar.gz 400 0.0 GB 8000<02:47]\n# writing 
s2a-dim64-ttsr-valfix/train-000003.tar.gz 400 0.0 GB 12000<02:12]\n# writing s2a-dim64-ttsr-valfix/train-000004.tar.gz 400 0.0 GB 16001<02:01]\n# writing s2a-dim64-ttsr-valfix/train-000005.tar.gz 400 0.0 GB 20001<01:45]\n# writing s2a-dim64-ttsr-valfix/train-000006.tar.gz 400 0.0 GB 24001<01:39]\n# writing s2a-dim64-ttsr-valfix/train-000007.tar.gz 400 0.0 GB 28001<01:35]\n# writing s2a-dim64-ttsr-valfix/train-000008.tar.gz 400 0.0 GB 32001<01:32]\n# writing s2a-dim64-ttsr-valfix/train-000009.tar.gz 400 0.0 GB 36001<01:26]\n# writing s2a-dim64-ttsr-valfix/train-000010.tar.gz 400 0.0 GB 40002<01:24]\n# writing s2a-dim64-ttsr-valfix/train-000011.tar.gz 400 0.0 GB 44002<01:22]\n# writing s2a-dim64-ttsr-valfix/train-000012.tar.gz 400 0.0 GB 48002<01:21]\n# writing s2a-dim64-ttsr-valfix/train-000013.tar.gz 400 0.0 GB 52002<01:19]\n# writing s2a-dim64-ttsr-valfix/train-000014.tar.gz 400 0.0 GB 56002<01:18]\n# writing s2a-dim64-ttsr-valfix/train-000015.tar.gz 400 0.0 GB 60002<01:17]\n# writing s2a-dim64-ttsr-valfix/train-000016.tar.gz 400 0.0 GB 64003<01:16]\n# writing s2a-dim64-ttsr-valfix/train-000017.tar.gz 400 0.0 GB 68003<01:15]\n# writing s2a-dim64-ttsr-valfix/train-000018.tar.gz 400 0.0 GB 72003<01:15]\n# writing s2a-dim64-ttsr-valfix/train-000019.tar.gz 400 0.0 GB 76003<01:14]\n# writing s2a-dim64-ttsr-valfix/train-000020.tar.gz 400 0.0 GB 80003<01:13]\n# writing s2a-dim64-ttsr-valfix/train-000021.tar.gz 400 0.0 GB 84003<01:13]\n# writing s2a-dim64-ttsr-valfix/train-000022.tar.gz 400 0.0 GB 88004<01:12]\n# writing s2a-dim64-ttsr-valfix/train-000023.tar.gz 400 0.0 GB 92004<01:11]\n# writing s2a-dim64-ttsr-valfix/train-000024.tar.gz 400 0.0 GB 96004<01:11]\n# writing s2a-dim64-ttsr-valfix/train-000025.tar.gz 400 0.0 GB 10000<01:10]\n# writing s2a-dim64-ttsr-valfix/train-000026.tar.gz 400 0.0 GB 104004<01:10]\n# writing s2a-dim64-ttsr-valfix/train-000027.tar.gz 400 0.0 GB 108005<01:09]\n# writing s2a-dim64-ttsr-valfix/train-000028.tar.gz 400 0.0 GB 112005<01:09]\n# 
writing s2a-dim64-ttsr-valfix/train-000029.tar.gz 400 0.0 GB 116005<01:09]\n# writing s2a-dim64-ttsr-valfix/train-000030.tar.gz 400 0.0 GB 12000\n# writing s2a-dim64-ttsr-valfix/train-000031.tar.gz 400 0.0 GB 124005<01:08]\n# writing s2a-dim64-ttsr-valfix/train-000032.tar.gz 400 0.0 GB 128005<01:08]\n# writing s2a-dim64-ttsr-valfix/train-000033.tar.gz 400 0.0 GB 132005<01:07]\n# writing s2a-dim64-ttsr-valfix/train-000034.tar.gz 400 0.0 GB 136006<01:07]\n# writing s2a-dim64-ttsr-valfix/train-000035.tar.gz 400 0.0 GB 140006<01:07]\n# writing s2a-dim64-ttsr-valfix/train-000036.tar.gz 400 0.0 GB 144006<01:07]\n# writing s2a-dim64-ttsr-valfix/train-000037.tar.gz 400 0.0 GB 148006<01:06]\n# writing s2a-dim64-ttsr-valfix/train-000038.tar.gz 400 0.0 GB 152006<01:06]\n# writing s2a-dim64-ttsr-valfix/train-000039.tar.gz 400 0.0 GB 156007<01:06]\n# writing s2a-dim64-ttsr-valfix/train-000040.tar.gz 400 0.0 GB 160007<01:05]\n# writing s2a-dim64-ttsr-valfix/train-000041.tar.gz 400 0.0 GB 1640007<01:05]\n# writing s2a-dim64-ttsr-valfix/train-000042.tar.gz 400 0.0 GB 1680007<01:05]\n# writing s2a-dim64-ttsr-valfix/train-000043.tar.gz 400 0.0 GB 1720007<01:04]\n# writing s2a-dim64-ttsr-valfix/train-000044.tar.gz 400 0.0 GB 17600\n# writing s2a-dim64-ttsr-valfix/train-000045.tar.gz 400 0.0 GB 1800008<01:04]\n# writing s2a-dim64-ttsr-valfix/train-000046.tar.gz 400 0.0 GB 1840008<01:04]\n# writing s2a-dim64-ttsr-valfix/train-000047.tar.gz 400 0.0 GB 1880008<01:03]\n# writing s2a-dim64-ttsr-valfix/train-000048.tar.gz 400 0.0 GB 1920008<01:03]\n# writing s2a-dim64-ttsr-valfix/train-000049.tar.gz 400 0.0 GB 1960008<01:03]\n# writing s2a-dim64-ttsr-valfix/train-000050.tar.gz 400 0.0 GB 2000009<01:03]\n# writing s2a-dim64-ttsr-valfix/train-000051.tar.gz 400 0.0 GB 2040009<01:02]\n# writing s2a-dim64-ttsr-valfix/train-000052.tar.gz 400 0.0 GB 2080009<01:02]\n# writing s2a-dim64-ttsr-valfix/train-000053.tar.gz 400 0.0 GB 2120009<01:02]\n# writing s2a-dim64-ttsr-valfix/train-000054.tar.gz 400 
0.0 GB 21600\n# writing s2a-dim64-ttsr-valfix/train-000055.tar.gz 400 0.0 GB 2200009<01:02]\n# writing s2a-dim64-ttsr-valfix/train-000056.tar.gz 400 0.0 GB 2240009<01:01]\n# writing s2a-dim64-ttsr-valfix/train-000057.tar.gz 400 0.0 GB 2280010<01:01]\n# writing s2a-dim64-ttsr-valfix/train-000058.tar.gz 400 0.0 GB 2320010<01:01]\n# writing s2a-dim64-ttsr-valfix/train-000059.tar.gz 400 0.0 GB 2360010<01:01]\n# writing s2a-dim64-ttsr-valfix/train-000060.tar.gz 400 0.0 GB 2400010<01:00]\n# writing s2a-dim64-ttsr-valfix/train-000061.tar.gz 400 0.0 GB 2440010<01:00]\n# writing s2a-dim64-ttsr-valfix/train-000062.tar.gz 400 0.0 GB 2480011<01:00]\n# writing s2a-dim64-ttsr-valfix/train-000063.tar.gz 400 0.0 GB 2520011<01:00]\n# writing s2a-dim64-ttsr-valfix/train-000064.tar.gz 400 0.0 GB 25600\n# writing s2a-dim64-ttsr-valfix/train-000065.tar.gz 400 0.0 GB 2600011<00:59]\n# writing s2a-dim64-ttsr-valfix/train-000066.tar.gz 400 0.0 GB 2640011<00:59]\n# writing s2a-dim64-ttsr-valfix/train-000067.tar.gz 400 0.0 GB 2680011<00:59]\n# writing s2a-dim64-ttsr-valfix/train-000068.tar.gz 400 0.0 GB 2720012<00:59]\n# writing s2a-dim64-ttsr-valfix/train-000069.tar.gz 400 0.0 GB 2760012<00:59]\n# writing s2a-dim64-ttsr-valfix/train-000070.tar.gz 400 0.0 GB 2800012<00:58]\n# writing s2a-dim64-ttsr-valfix/train-000071.tar.gz 400 0.0 GB 2840012<00:58]\n# writing s2a-dim64-ttsr-valfix/train-000072.tar.gz 400 0.0 GB 2880012<00:58]\n# writing s2a-dim64-ttsr-valfix/train-000073.tar.gz 400 0.0 GB 2920013<00:58]\n# writing s2a-dim64-ttsr-valfix/train-000074.tar.gz 400 0.0 GB 29600\n# writing s2a-dim64-ttsr-valfix/train-000075.tar.gz 400 0.0 GB 3000013<00:57]\n# writing s2a-dim64-ttsr-valfix/train-000076.tar.gz 400 0.0 GB 3040013<00:57]\n# writing s2a-dim64-ttsr-valfix/train-000077.tar.gz 400 0.0 GB 3080013<00:57]\n# writing s2a-dim64-ttsr-valfix/train-000078.tar.gz 400 0.0 GB 3120013<00:57]\n# writing s2a-dim64-ttsr-valfix/train-000079.tar.gz 400 0.0 GB 3160014<00:56]\n# writing 
s2a-dim64-ttsr-valfix/train-000080.tar.gz 400 0.0 GB 3200014<00:56]\n# writing s2a-dim64-ttsr-valfix/train-000081.tar.gz 400 0.0 GB 3240014<00:56]\n# writing s2a-dim64-ttsr-valfix/train-000082.tar.gz 400 0.0 GB 3280014<00:56]\n# writing s2a-dim64-ttsr-valfix/train-000083.tar.gz 400 0.0 GB 33200\n# writing s2a-dim64-ttsr-valfix/train-000084.tar.gz 400 0.0 GB 3360014<00:56]\n# writing s2a-dim64-ttsr-valfix/train-000085.tar.gz 400 0.0 GB 3400014<00:55]\n# writing s2a-dim64-ttsr-valfix/train-000086.tar.gz 400 0.0 GB 3440015<00:55]\n# writing s2a-dim64-ttsr-valfix/train-000087.tar.gz 400 0.0 GB 3480015<00:55]\n# writing s2a-dim64-ttsr-valfix/train-000088.tar.gz 400 0.0 GB 3520015<00:55]\n# writing s2a-dim64-ttsr-valfix/train-000089.tar.gz 400 0.0 GB 3560015<00:54]\n# writing s2a-dim64-ttsr-valfix/train-000090.tar.gz 400 0.0 GB 3600015<00:54]\n# writing s2a-dim64-ttsr-valfix/train-000091.tar.gz 400 0.0 GB 36400\n# writing s2a-dim64-ttsr-valfix/train-000092.tar.gz 400 0.0 GB 3680016<00:54]\n# writing s2a-dim64-ttsr-valfix/train-000093.tar.gz 400 0.0 GB 3720016<00:54]\n# writing s2a-dim64-ttsr-valfix/train-000094.tar.gz 400 0.0 GB 3760016<00:53]\n# writing s2a-dim64-ttsr-valfix/train-000095.tar.gz 400 0.0 GB 3800016<00:53]\n# writing s2a-dim64-ttsr-valfix/train-000096.tar.gz 400 0.0 GB 3840016<00:53]\n# writing s2a-dim64-ttsr-valfix/train-000097.tar.gz 400 0.0 GB 3880017<00:53]\n# writing s2a-dim64-ttsr-valfix/train-000098.tar.gz 400 0.0 GB 3920017<00:52]\n# writing s2a-dim64-ttsr-valfix/train-000099.tar.gz 400 0.0 GB 3960017<00:52]\n# writing s2a-dim64-ttsr-valfix/train-000100.tar.gz 400 0.0 GB 40000\n# writing s2a-dim64-ttsr-valfix/train-000101.tar.gz 400 0.0 GB 4040017<00:52]\n# writing s2a-dim64-ttsr-valfix/train-000102.tar.gz 400 0.0 GB 4080017<00:52]\n# writing s2a-dim64-ttsr-valfix/train-000103.tar.gz 400 0.0 GB 4120018<00:52]\n# writing s2a-dim64-ttsr-valfix/train-000104.tar.gz 400 0.0 GB 4160018<00:51]\n# writing s2a-dim64-ttsr-valfix/train-000105.tar.gz 400 0.0 
GB 4200018<00:51]\n# writing s2a-dim64-ttsr-valfix/train-000106.tar.gz 400 0.0 GB 4240018<00:51]\n# writing s2a-dim64-ttsr-valfix/train-000107.tar.gz 400 0.0 GB 4280018<00:51]\n# writing s2a-dim64-ttsr-valfix/train-000108.tar.gz 400 0.0 GB 43200\n# writing s2a-dim64-ttsr-valfix/train-000109.tar.gz 400 0.0 GB 4360019<00:51]\n# writing s2a-dim64-ttsr-valfix/train-000110.tar.gz 400 0.0 GB 4400019<00:50]\n# writing s2a-dim64-ttsr-valfix/train-000111.tar.gz 400 0.0 GB 4440019<00:50]\n# writing s2a-dim64-ttsr-valfix/train-000112.tar.gz 400 0.0 GB 4480019<00:50]\n# writing s2a-dim64-ttsr-valfix/train-000113.tar.gz 400 0.0 GB 4520019<00:50]\n# writing s2a-dim64-ttsr-valfix/train-000114.tar.gz 400 0.0 GB 4560020<00:50]\n# writing s2a-dim64-ttsr-valfix/train-000115.tar.gz 400 0.0 GB 4600020<00:49]\n# writing s2a-dim64-ttsr-valfix/train-000116.tar.gz 400 0.0 GB 4640020<00:49]\n# writing s2a-dim64-ttsr-valfix/train-000117.tar.gz 400 0.0 GB 46800\n# writing s2a-dim64-ttsr-valfix/train-000118.tar.gz 400 0.0 GB 4720020<00:49]\n# writing s2a-dim64-ttsr-valfix/train-000119.tar.gz 400 0.0 GB 4760020<00:49]\n# writing s2a-dim64-ttsr-valfix/train-000120.tar.gz 400 0.0 GB 4800021<00:49]\n# writing s2a-dim64-ttsr-valfix/train-000121.tar.gz 400 0.0 GB 4840021<00:48]\n# writing s2a-dim64-ttsr-valfix/train-000122.tar.gz 400 0.0 GB 4880021<00:48]\n# writing s2a-dim64-ttsr-valfix/train-000123.tar.gz 400 0.0 GB 4920021<00:48]\n# writing s2a-dim64-ttsr-valfix/train-000124.tar.gz 400 0.0 GB 4960021<00:48]\n# writing s2a-dim64-ttsr-valfix/train-000125.tar.gz 400 0.0 GB 50000\n# writing s2a-dim64-ttsr-valfix/train-000126.tar.gz 400 0.0 GB 5040022<00:48]\n# writing s2a-dim64-ttsr-valfix/train-000127.tar.gz 400 0.0 GB 5080022<00:47]\n# writing s2a-dim64-ttsr-valfix/train-000128.tar.gz 400 0.0 GB 5120022<00:47]\n# writing s2a-dim64-ttsr-valfix/train-000129.tar.gz 400 0.0 GB 5160022<00:47]\n# writing s2a-dim64-ttsr-valfix/train-000130.tar.gz 400 0.0 GB 5200022<00:47]\n# writing 
s2a-dim64-ttsr-valfix/train-000131.tar.gz 400 0.0 GB 5240022<00:47]\n# writing s2a-dim64-ttsr-valfix/train-000132.tar.gz 400 0.0 GB 5280023<00:46]\n# writing s2a-dim64-ttsr-valfix/train-000133.tar.gz 400 0.0 GB 53200\n# writing s2a-dim64-ttsr-valfix/train-000134.tar.gz 400 0.0 GB 5360023<00:46]\n# writing s2a-dim64-ttsr-valfix/train-000135.tar.gz 400 0.0 GB 5400023<00:46]\n# writing s2a-dim64-ttsr-valfix/train-000136.tar.gz 400 0.0 GB 5440023<00:46]\n# writing s2a-dim64-ttsr-valfix/train-000137.tar.gz 400 0.0 GB 5480023<00:46]\n# writing s2a-dim64-ttsr-valfix/train-000138.tar.gz 400 0.0 GB 5520024<00:45]\n# writing s2a-dim64-ttsr-valfix/train-000139.tar.gz 400 0.0 GB 5560024<00:45]\n# writing s2a-dim64-ttsr-valfix/train-000140.tar.gz 400 0.0 GB 5600024<00:45]\n# writing s2a-dim64-ttsr-valfix/train-000141.tar.gz 400 0.0 GB 5640024<00:45]\n# writing s2a-dim64-ttsr-valfix/train-000142.tar.gz 400 0.0 GB 56800\n# writing s2a-dim64-ttsr-valfix/train-000143.tar.gz 400 0.0 GB 5720024<00:45]\n# writing s2a-dim64-ttsr-valfix/train-000144.tar.gz 400 0.0 GB 5760025<00:44]\n# writing s2a-dim64-ttsr-valfix/train-000145.tar.gz 400 0.0 GB 5800025<00:44]\n# writing s2a-dim64-ttsr-valfix/train-000146.tar.gz 400 0.0 GB 5840025<00:44]\n# writing s2a-dim64-ttsr-valfix/train-000147.tar.gz 400 0.0 GB 5880025<00:44]\n# writing s2a-dim64-ttsr-valfix/train-000148.tar.gz 400 0.0 GB 5920025<00:44]\n# writing s2a-dim64-ttsr-valfix/train-000149.tar.gz 400 0.0 GB 5960026<00:43]\n# writing s2a-dim64-ttsr-valfix/train-000150.tar.gz 400 0.0 GB 6000026<00:43]\n# writing s2a-dim64-ttsr-valfix/train-000151.tar.gz 400 0.0 GB 60400\n# writing s2a-dim64-ttsr-valfix/train-000152.tar.gz 400 0.0 GB 6080026<00:43]\n# writing s2a-dim64-ttsr-valfix/train-000153.tar.gz 400 0.0 GB 6120026<00:43]\n# writing s2a-dim64-ttsr-valfix/train-000154.tar.gz 400 0.0 GB 6160026<00:43]\n# writing s2a-dim64-ttsr-valfix/train-000155.tar.gz 400 0.0 GB 6200027<00:42]\n# writing s2a-dim64-ttsr-valfix/train-000156.tar.gz 400 0.0 
GB 6240027<00:42]\n# writing s2a-dim64-ttsr-valfix/train-000157.tar.gz 400 0.0 GB 6280027<00:42]\n# writing s2a-dim64-ttsr-valfix/train-000158.tar.gz 400 0.0 GB 6320027<00:42]\n# writing s2a-dim64-ttsr-valfix/train-000159.tar.gz 400 0.0 GB 63600\n# writing s2a-dim64-ttsr-valfix/train-000160.tar.gz 400 0.0 GB 6400027<00:42]\n# writing s2a-dim64-ttsr-valfix/train-000161.tar.gz 400 0.0 GB 6440028<00:41]\n# writing s2a-dim64-ttsr-valfix/train-000162.tar.gz 400 0.0 GB 6480028<00:41]\n# writing s2a-dim64-ttsr-valfix/train-000163.tar.gz 400 0.0 GB 6520028<00:41]\n# writing s2a-dim64-ttsr-valfix/train-000164.tar.gz 400 0.0 GB 6560028<00:41]\n# writing s2a-dim64-ttsr-valfix/train-000165.tar.gz 400 0.0 GB 6600028<00:40]\n# writing s2a-dim64-ttsr-valfix/train-000166.tar.gz 400 0.0 GB 6640029<00:40]\n# writing s2a-dim64-ttsr-valfix/train-000167.tar.gz 400 0.0 GB 66800\n# writing s2a-dim64-ttsr-valfix/train-000168.tar.gz 400 0.0 GB 6720029<00:40]\n# writing s2a-dim64-ttsr-valfix/train-000169.tar.gz 400 0.0 GB 6760029<00:40]\n# writing s2a-dim64-ttsr-valfix/train-000170.tar.gz 400 0.0 GB 6800029<00:40]\n# writing s2a-dim64-ttsr-valfix/train-000171.tar.gz 400 0.0 GB 6840029<00:39]\n# writing s2a-dim64-ttsr-valfix/train-000172.tar.gz 400 0.0 GB 6880030<00:39]\n# writing s2a-dim64-ttsr-valfix/train-000173.tar.gz 400 0.0 GB 6920030<00:39]\n# writing s2a-dim64-ttsr-valfix/train-000174.tar.gz 400 0.0 GB 6960030<00:39]\n# writing s2a-dim64-ttsr-valfix/train-000175.tar.gz 400 0.0 GB 7000030<00:39]\n# writing s2a-dim64-ttsr-valfix/train-000176.tar.gz 400 0.0 GB 70400\n# writing s2a-dim64-ttsr-valfix/train-000177.tar.gz 400 0.0 GB 7080030<00:38]\n# writing s2a-dim64-ttsr-valfix/train-000178.tar.gz 400 0.0 GB 7120031<00:38]\n# writing s2a-dim64-ttsr-valfix/train-000179.tar.gz 400 0.0 GB 7160031<00:38]\n# writing s2a-dim64-ttsr-valfix/train-000180.tar.gz 400 0.0 GB 7200031<00:38]\n# writing s2a-dim64-ttsr-valfix/train-000181.tar.gz 400 0.0 GB 7240031<00:38]\n# writing 
s2a-dim64-ttsr-valfix/train-000182.tar.gz 400 0.0 GB 7280031<00:37]\n# writing s2a-dim64-ttsr-valfix/train-000183.tar.gz 400 0.0 GB 7320032<00:37]\n# writing s2a-dim64-ttsr-valfix/train-000184.tar.gz 400 0.0 GB 73600\n# writing s2a-dim64-ttsr-valfix/train-000185.tar.gz 400 0.0 GB 7400032<00:37]\n# writing s2a-dim64-ttsr-valfix/train-000186.tar.gz 400 0.0 GB 7440032<00:37]\n# writing s2a-dim64-ttsr-valfix/train-000187.tar.gz 400 0.0 GB 7480032<00:37]\n# writing s2a-dim64-ttsr-valfix/train-000188.tar.gz 400 0.0 GB 7520032<00:36]\n# writing███████████----------------------| 47.37% [75960/160350 00:33<00:36] s2a-dim64-ttsr-valfix/train-000189.tar.gz 400 0.0 GB 75600\n# writing s2a-dim64-ttsr-valfix/train-000190.tar.gz 400 0.0 GB 7600033<00:36]\n# writing s2a-dim64-ttsr-valfix/train-000191.tar.gz 400 0.0 GB 7640033<00:36]\n# writing s2a-dim64-ttsr-valfix/train-000192.tar.gz 400 0.0 GB 76800\n# writing s2a-dim64-ttsr-valfix/train-000193.tar.gz 400 0.0 GB 7720033<00:36]\n# writing s2a-dim64-ttsr-valfix/train-000194.tar.gz 400 0.0 GB 7760033<00:35]\n# writing s2a-dim64-ttsr-valfix/train-000195.tar.gz 400 0.0 GB 7800034<00:35]\n# writing s2a-dim64-ttsr-valfix/train-000196.tar.gz 400 0.0 GB 7840034<00:35]\n# writing s2a-dim64-ttsr-valfix/train-000197.tar.gz 400 0.0 GB 7880034<00:35]\n# writing s2a-dim64-ttsr-valfix/train-000198.tar.gz 400 0.0 GB 7920034<00:35]\n# writing s2a-dim64-ttsr-valfix/train-000199.tar.gz 400 0.0 GB 7960034<00:34]\n# writing s2a-dim64-ttsr-valfix/train-000200.tar.gz 400 0.0 GB 80000\n# writing s2a-dim64-ttsr-valfix/train-000201.tar.gz 400 0.0 GB 8040035<00:34]\n# writing s2a-dim64-ttsr-valfix/train-000202.tar.gz 400 0.0 GB 8080035<00:34]\n# writing s2a-dim64-ttsr-valfix/train-000203.tar.gz 400 0.0 GB 8120035<00:34]\n# writing s2a-dim64-ttsr-valfix/train-000204.tar.gz 400 0.0 GB 8160035<00:33]\n# writing s2a-dim64-ttsr-valfix/train-000205.tar.gz 400 0.0 GB 8200035<00:33]\n# writing s2a-dim64-ttsr-valfix/train-000206.tar.gz 400 0.0 GB 8240035<00:33]\n# 
writing s2a-dim64-ttsr-valfix/train-000207.tar.gz 400 0.0 GB 82800\n# writing s2a-dim64-ttsr-valfix/train-000208.tar.gz 400 0.0 GB 8320035<00:33]\n# writing s2a-dim64-ttsr-valfix/train-000209.tar.gz 400 0.0 GB 8360036<00:32]\n# writing s2a-dim64-ttsr-valfix/train-000210.tar.gz 400 0.0 GB 8400036<00:32]\n# writing s2a-dim64-ttsr-valfix/train-000211.tar.gz 400 0.0 GB 8440036<00:32]\n# writing s2a-dim64-ttsr-valfix/train-000212.tar.gz 400 0.0 GB 8480036<00:32]\n# writing s2a-dim64-ttsr-valfix/train-000213.tar.gz 400 0.0 GB 8520036<00:31]\n# writing s2a-dim64-ttsr-valfix/train-000214.tar.gz 400 0.0 GB 8560036<00:31]\n# writing s2a-dim64-ttsr-valfix/train-000215.tar.gz 400 0.0 GB 86000\n# writing s2a-dim64-ttsr-valfix/train-000216.tar.gz 400 0.0 GB 8640036<00:31]\n# writing s2a-dim64-ttsr-valfix/train-000217.tar.gz 400 0.0 GB 8680036<00:31]\n# writing s2a-dim64-ttsr-valfix/train-000218.tar.gz 400 0.0 GB 8720037<00:30]\n# writing s2a-dim64-ttsr-valfix/train-000219.tar.gz 400 0.0 GB 8760037<00:30]\n# writing s2a-dim64-ttsr-valfix/train-000220.tar.gz 400 0.0 GB 8800037<00:30]\n# writing s2a-dim64-ttsr-valfix/train-000221.tar.gz 400 0.0 GB 88400\n# writing s2a-dim64-ttsr-valfix/train-000222.tar.gz 400 0.0 GB 8880037<00:30]\n# writing s2a-dim64-ttsr-valfix/train-000223.tar.gz 400 0.0 GB 8920037<00:29]\n# writing s2a-dim64-ttsr-valfix/train-000224.tar.gz 400 0.0 GB 8960037<00:29]\n# writing s2a-dim64-ttsr-valfix/train-000225.tar.gz 400 0.0 GB 9000037<00:29]\n# writing s2a-dim64-ttsr-valfix/train-000226.tar.gz 400 0.0 GB 9040038<00:29]\n# writing s2a-dim64-ttsr-valfix/train-000227.tar.gz 400 0.0 GB 9080038<00:28]\n# writing s2a-dim64-ttsr-valfix/train-000228.tar.gz 400 0.0 GB 91200\n# writing s2a-dim64-ttsr-valfix/train-000229.tar.gz 400 0.0 GB 9160038<00:28]\n# writing s2a-dim64-ttsr-valfix/train-000230.tar.gz 400 0.0 GB 9200038<00:28]\n# writing s2a-dim64-ttsr-valfix/train-000231.tar.gz 400 0.0 GB 9240038<00:28]\n# writing s2a-dim64-ttsr-valfix/train-000232.tar.gz 400 0.0 GB 
9280038<00:27]\n# writing s2a-dim64-ttsr-valfix/train-000233.tar.gz 400 0.0 GB 9320038<00:27]\n# writing s2a-dim64-ttsr-valfix/train-000234.tar.gz 400 0.0 GB 93600\n# writing s2a-dim64-ttsr-valfix/train-000235.tar.gz 400 0.0 GB 9400039<00:27]\n# writing s2a-dim64-ttsr-valfix/train-000236.tar.gz 400 0.0 GB 9440039<00:27]\n# writing s2a-dim64-ttsr-valfix/train-000237.tar.gz 400 0.0 GB 9480039<00:27]\n# writing s2a-dim64-ttsr-valfix/train-000238.tar.gz 400 0.0 GB 9520039<00:26]\n# writing s2a-dim64-ttsr-valfix/train-000239.tar.gz 400 0.0 GB 9560039<00:26]\n# writing s2a-dim64-ttsr-valfix/train-000240.tar.gz 400 0.0 GB 96000\n# writing s2a-dim64-ttsr-valfix/train-000241.tar.gz 400 0.0 GB 9640039<00:26]\n# writing s2a-dim64-ttsr-valfix/train-000242.tar.gz 400 0.0 GB 9680039<00:26]\n# writing s2a-dim64-ttsr-valfix/train-000243.tar.gz 400 0.0 GB 9720040<00:25]\n# writing s2a-dim64-ttsr-valfix/train-000244.tar.gz 400 0.0 GB 9760040<00:25]\n# writing s2a-dim64-ttsr-valfix/train-000245.tar.gz 400 0.0 GB 98000\n# writing s2a-dim64-ttsr-valfix/train-000246.tar.gz 400 0.0 GB 9840040<00:25]\n# writing s2a-dim64-ttsr-valfix/train-000247.tar.gz 400 0.0 GB 9880040<00:25]\n# writing s2a-dim64-ttsr-valfix/train-000248.tar.gz 400 0.0 GB 9920040<00:24]\n# writing s2a-dim64-ttsr-valfix/train-000249.tar.gz 400 0.0 GB 9960040<00:24]\n# writing s2a-dim64-ttsr-valfix/train-000250.tar.gz 400 0.0 GB 10000041<00:24]\n# writing s2a-dim64-ttsr-valfix/train-000251.tar.gz 400 0.0 GB 100400\n# writing s2a-dim64-ttsr-valfix/train-000252.tar.gz 400 0.0 GB 10080041<00:24]\n# writing s2a-dim64-ttsr-valfix/train-000253.tar.gz 400 0.0 GB 10120041<00:23]\n# writing s2a-dim64-ttsr-valfix/train-000254.tar.gz 400 0.0 GB 10160041<00:23]\n# writing s2a-dim64-ttsr-valfix/train-000255.tar.gz 400 0.0 GB 10200041<00:23]\n# writing s2a-dim64-ttsr-valfix/train-000256.tar.gz 400 0.0 GB 102400\n# writing s2a-dim64-ttsr-valfix/train-000257.tar.gz 400 0.0 GB 10280041<00:23]\n# writing 
s2a-dim64-ttsr-valfix/train-000258.tar.gz 400 0.0 GB 10320041<00:23]\n# writing s2a-dim64-ttsr-valfix/train-000259.tar.gz 400 0.0 GB 10360042<00:22]\n# writing s2a-dim64-ttsr-valfix/train-000260.tar.gz 400 0.0 GB 10400042<00:22]\n# writing s2a-dim64-ttsr-valfix/train-000261.tar.gz 400 0.0 GB 10440042<00:22]\n# writing s2a-dim64-ttsr-valfix/train-000262.tar.gz 400 0.0 GB 104800\n# writing s2a-dim64-ttsr-valfix/train-000263.tar.gz 400 0.0 GB 10520042<00:22]\n# writing s2a-dim64-ttsr-valfix/train-000264.tar.gz 400 0.0 GB 10560042<00:21]\n# writing s2a-dim64-ttsr-valfix/train-000265.tar.gz 400 0.0 GB 10600042<00:21]\n# writing s2a-dim64-ttsr-valfix/train-000266.tar.gz 400 0.0 GB 10640042<00:21]\n# writing s2a-dim64-ttsr-valfix/train-000267.tar.gz 400 0.0 GB 106800\n# writing s2a-dim64-ttsr-valfix/train-000268.tar.gz 400 0.0 GB 10720043<00:21]\n# writing s2a-dim64-ttsr-valfix/train-000269.tar.gz 400 0.0 GB 10760043<00:21]\n# writing s2a-dim64-ttsr-valfix/train-000270.tar.gz 400 0.0 GB 10800043<00:20]\n# writing s2a-dim64-ttsr-valfix/train-000271.tar.gz 400 0.0 GB 10840043<00:20]\n# writing s2a-dim64-ttsr-valfix/train-000272.tar.gz 400 0.0 GB 108800\n# writing s2a-dim64-ttsr-valfix/train-000273.tar.gz 400 0.0 GB 10920043<00:20]\n# writing s2a-dim64-ttsr-valfix/train-000274.tar.gz 400 0.0 GB 10960043<00:20]\n# writing s2a-dim64-ttsr-valfix/train-000275.tar.gz 400 0.0 GB 11000043<00:19]\n# writing s2a-dim64-ttsr-valfix/train-000276.tar.gz 400 0.0 GB 11040044<00:19]\n# writing s2a-dim64-ttsr-valfix/train-000277.tar.gz 400 0.0 GB 110800\n# writing s2a-dim64-ttsr-valfix/train-000278.tar.gz 400 0.0 GB 11120044<00:19]\n# writing s2a-dim64-ttsr-valfix/train-000279.tar.gz 400 0.0 GB 11160044<00:19]\n# writing s2a-dim64-ttsr-valfix/train-000280.tar.gz 400 0.0 GB 11200044<00:19]\n# writing s2a-dim64-ttsr-valfix/train-000281.tar.gz 400 0.0 GB 11240044<00:18]\n# writing s2a-dim64-ttsr-valfix/train-000282.tar.gz 400 0.0 GB 112800\n# writing s2a-dim64-ttsr-valfix/train-000283.tar.gz 
400 0.0 GB 11320044<00:18]\n# writing s2a-dim64-ttsr-valfix/train-000284.tar.gz 400 0.0 GB 11360045<00:18]\n# writing s2a-dim64-ttsr-valfix/train-000285.tar.gz 400 0.0 GB 11400045<00:18]\n# writing s2a-dim64-ttsr-valfix/train-000286.tar.gz 400 0.0 GB 114400\n# writing s2a-dim64-ttsr-valfix/train-000287.tar.gz 400 0.0 GB 11480045<00:17]\n# writing s2a-dim64-ttsr-valfix/train-000288.tar.gz 400 0.0 GB 11520045<00:17]\n# writing s2a-dim64-ttsr-valfix/train-000289.tar.gz 400 0.0 GB 11560045<00:17]\n# writing s2a-dim64-ttsr-valfix/train-000290.tar.gz 400 0.0 GB 11600045<00:17]\n# writing s2a-dim64-ttsr-valfix/train-000291.tar.gz 400 0.0 GB 116400\n# writing s2a-dim64-ttsr-valfix/train-000292.tar.gz 400 0.0 GB 11680045<00:17]\n# writing s2a-dim64-ttsr-valfix/train-000293.tar.gz 400 0.0 GB 11720046<00:16]\n# writing s2a-dim64-ttsr-valfix/train-000294.tar.gz 400 0.0 GB 11760046<00:16]\n# writing s2a-dim64-ttsr-valfix/train-000295.tar.gz 400 0.0 GB 11800046<00:16]\n# writing s2a-dim64-ttsr-valfix/train-000296.tar.gz 400 0.0 GB 118400\n# writing s2a-dim64-ttsr-valfix/train-000297.tar.gz 400 0.0 GB 11880046<00:16]\n# writing s2a-dim64-ttsr-valfix/train-000298.tar.gz 400 0.0 GB 11920046<00:15]\n# writing s2a-dim64-ttsr-valfix/train-000299.tar.gz 400 0.0 GB 11960046<00:15]\n# writing s2a-dim64-ttsr-valfix/train-000300.tar.gz 400 0.0 GB 120000\n# writing s2a-dim64-ttsr-valfix/train-000301.tar.gz 400 0.0 GB 12040047<00:15]\n# writing s2a-dim64-ttsr-valfix/train-000302.tar.gz 400 0.0 GB 12080047<00:15]\n# writing s2a-dim64-ttsr-valfix/train-000303.tar.gz 400 0.0 GB 12120047<00:15]\n# writing s2a-dim64-ttsr-valfix/train-000304.tar.gz 400 0.0 GB 12160047<00:14]\n# writing s2a-dim64-ttsr-valfix/train-000305.tar.gz 400 0.0 GB 122000\n# writing s2a-dim64-ttsr-valfix/train-000306.tar.gz 400 0.0 GB 12240047<00:14]\n# writing s2a-dim64-ttsr-valfix/train-000307.tar.gz 400 0.0 GB 12280047<00:14]\n# writing s2a-dim64-ttsr-valfix/train-000308.tar.gz 400 0.0 GB 12320047<00:14]\n# writing 
s2a-dim64-ttsr-valfix/train-000309.tar.gz 400 0.0 GB 12360048<00:14]\n# writing s2a-dim64-ttsr-valfix/train-000310.tar.gz 400 0.0 GB 124000\n# writing s2a-dim64-ttsr-valfix/train-000311.tar.gz 400 0.0 GB 12440048<00:13]\n# writing s2a-dim64-ttsr-valfix/train-000312.tar.gz 400 0.0 GB 12480048<00:13]\n# writing s2a-dim64-ttsr-valfix/train-000313.tar.gz 400 0.0 GB 12520048<00:13]\n# writing s2a-dim64-ttsr-valfix/train-000314.tar.gz 400 0.0 GB 125600\n# writing s2a-dim64-ttsr-valfix/train-000315.tar.gz 400 0.0 GB 12600048<00:13]\n# writing s2a-dim64-ttsr-valfix/train-000316.tar.gz 400 0.0 GB 12640048<00:13]\n# writing s2a-dim64-ttsr-valfix/train-000317.tar.gz 400 0.0 GB 12680049<00:12]\n# writing s2a-dim64-ttsr-valfix/train-000318.tar.gz 400 0.0 GB 127200\n# writing s2a-dim64-ttsr-valfix/train-000319.tar.gz 400 0.0 GB 12760049<00:12]\n# writing s2a-dim64-ttsr-valfix/train-000320.tar.gz 400 0.0 GB 12800049<00:12]\n# writing s2a-dim64-ttsr-valfix/train-000321.tar.gz 400 0.0 GB 12840049<00:12]\n# writing s2a-dim64-ttsr-valfix/train-000322.tar.gz 400 0.0 GB 12880049<00:11]\n# writing s2a-dim64-ttsr-valfix/train-000323.tar.gz 400 0.0 GB 129200\n# writing s2a-dim64-ttsr-valfix/train-000324.tar.gz 400 0.0 GB 12960049<00:11]\n# writing s2a-dim64-ttsr-valfix/train-000325.tar.gz 400 0.0 GB 13000050<00:11]\n# writing s2a-dim64-ttsr-valfix/train-000326.tar.gz 400 0.0 GB 13040050<00:11]\n# writing s2a-dim64-ttsr-valfix/train-000327.tar.gz 400 0.0 GB 130800\n# writing s2a-dim64-ttsr-valfix/train-000328.tar.gz 400 0.0 GB 13120050<00:11]\n# writing s2a-dim64-ttsr-valfix/train-000329.tar.gz 400 0.0 GB 13160050<00:10]\n# writing s2a-dim64-ttsr-valfix/train-000330.tar.gz 400 0.0 GB 13200050<00:10]\n# writing s2a-dim64-ttsr-valfix/train-000331.tar.gz 400 0.0 GB 132400\n# writing s2a-dim64-ttsr-valfix/train-000332.tar.gz 400 0.0 GB 13280050<00:10]\n# writing s2a-dim64-ttsr-valfix/train-000333.tar.gz 400 0.0 GB 13320050<00:10]\n# writing s2a-dim64-ttsr-valfix/train-000334.tar.gz 400 0.0 GB 
13360051<00:10]\n# writing s2a-dim64-ttsr-valfix/train-000335.tar.gz 400 0.0 GB 13400051<00:09]\n# writing s2a-dim64-ttsr-valfix/train-000336.tar.gz 400 0.0 GB 134400\n# writing s2a-dim64-ttsr-valfix/train-000337.tar.gz 400 0.0 GB 13480051<00:09]\n# writing s2a-dim64-ttsr-valfix/train-000338.tar.gz 400 0.0 GB 13520051<00:09]\n# writing s2a-dim64-ttsr-valfix/train-000339.tar.gz 400 0.0 GB 13560051<00:09]\n# writing s2a-dim64-ttsr-valfix/train-000340.tar.gz 400 0.0 GB 136000\n# writing s2a-dim64-ttsr-valfix/train-000341.tar.gz 400 0.0 GB 13640051<00:09]\n# writing s2a-dim64-ttsr-valfix/train-000342.tar.gz 400 0.0 GB 13680052<00:08]\n# writing s2a-dim64-ttsr-valfix/train-000343.tar.gz 400 0.0 GB 13720052<00:08]\n# writing s2a-dim64-ttsr-valfix/train-000344.tar.gz 400 0.0 GB 137600\n# writing s2a-dim64-ttsr-valfix/train-000345.tar.gz 400 0.0 GB 13800052<00:08]\n# writing s2a-dim64-ttsr-valfix/train-000346.tar.gz 400 0.0 GB 13840052<00:08]\n# writing s2a-dim64-ttsr-valfix/train-000347.tar.gz 400 0.0 GB 13880052<00:08]\n# writing s2a-dim64-ttsr-valfix/train-000348.tar.gz 400 0.0 GB 139200\n# writing s2a-dim64-ttsr-valfix/train-000349.tar.gz 400 0.0 GB 13960052<00:07]\n# writing s2a-dim64-ttsr-valfix/train-000350.tar.gz 400 0.0 GB 14000053<00:07]\n# writing s2a-dim64-ttsr-valfix/train-000351.tar.gz 400 0.0 GB 14040053<00:07]\n# writing s2a-dim64-ttsr-valfix/train-000352.tar.gz 400 0.0 GB 140800\n# writing s2a-dim64-ttsr-valfix/train-000353.tar.gz 400 0.0 GB 14120053<00:07]\n# writing s2a-dim64-ttsr-valfix/train-000354.tar.gz 400 0.0 GB 14160053<00:06]\n# writing s2a-dim64-ttsr-valfix/train-000355.tar.gz 400 0.0 GB 14200053<00:06]\n# writing s2a-dim64-ttsr-valfix/train-000356.tar.gz 400 0.0 GB 142400\n# writing s2a-dim64-ttsr-valfix/train-000357.tar.gz 400 0.0 GB 14280053<00:06]\n# writing s2a-dim64-ttsr-valfix/train-000358.tar.gz 400 0.0 GB 14320054<00:06]\n# writing s2a-dim64-ttsr-valfix/train-000359.tar.gz 400 0.0 GB 14360054<00:06]\n# writing 
s2a-dim64-ttsr-valfix/train-000360.tar.gz 400 0.0 GB 14400054<00:05]\n# writing s2a-dim64-ttsr-valfix/train-000361.tar.gz 400 0.0 GB 144400\n# writing s2a-dim64-ttsr-valfix/train-000362.tar.gz 400 0.0 GB 14480054<00:05]\n# writing s2a-dim64-ttsr-valfix/train-000363.tar.gz 400 0.0 GB 14520054<00:05]\n# writing s2a-dim64-ttsr-valfix/train-000364.tar.gz 400 0.0 GB 14560054<00:05]\n# writing s2a-dim64-ttsr-valfix/train-000365.tar.gz 400 0.0 GB 146000\n# writing s2a-dim64-ttsr-valfix/train-000366.tar.gz 400 0.0 GB 14640055<00:05]\n# writing s2a-dim64-ttsr-valfix/train-000367.tar.gz 400 0.0 GB 14680055<00:04]\n# writing s2a-dim64-ttsr-valfix/train-000368.tar.gz 400 0.0 GB 14720055<00:04]\n# writing s2a-dim64-ttsr-valfix/train-000369.tar.gz 400 0.0 GB 147600\n# writing s2a-dim64-ttsr-valfix/train-000370.tar.gz 400 0.0 GB 14800055<00:04]\n# writing s2a-dim64-ttsr-valfix/train-000371.tar.gz 400 0.0 GB 14840055<00:04]\n# writing s2a-dim64-ttsr-valfix/train-000372.tar.gz 400 0.0 GB 14880055<00:04]\n# writing s2a-dim64-ttsr-valfix/train-000373.tar.gz 400 0.0 GB 149200\n# writing s2a-dim64-ttsr-valfix/train-000374.tar.gz 400 0.0 GB 14960056<00:03]\n# writing s2a-dim64-ttsr-valfix/train-000375.tar.gz 400 0.0 GB 15000056<00:03]\n# writing s2a-dim64-ttsr-valfix/train-000376.tar.gz 400 0.0 GB 15040056<00:03]\n# writing s2a-dim64-ttsr-valfix/train-000377.tar.gz 400 0.0 GB 150800\n# writing s2a-dim64-ttsr-valfix/train-000378.tar.gz 400 0.0 GB 15120056<00:03]\n# writing s2a-dim64-ttsr-valfix/train-000379.tar.gz 400 0.0 GB 15160056<00:03]\n# writing s2a-dim64-ttsr-valfix/train-000380.tar.gz 400 0.0 GB 15200056<00:02]\n# writing s2a-dim64-ttsr-valfix/train-000381.tar.gz 400 0.0 GB 152400\n# writing s2a-dim64-ttsr-valfix/train-000382.tar.gz 400 0.0 GB 15280057<00:02]\n# writing s2a-dim64-ttsr-valfix/train-000383.tar.gz 400 0.0 GB 15320057<00:02]\n# writing s2a-dim64-ttsr-valfix/train-000384.tar.gz 400 0.0 GB 15360057<00:02]\n# writing s2a-dim64-ttsr-valfix/train-000385.tar.gz 400 0.0 GB 
154000\n# writing s2a-dim64-ttsr-valfix/train-000386.tar.gz 400 0.0 GB 15440057<00:02]\n# writing s2a-dim64-ttsr-valfix/train-000387.tar.gz 400 0.0 GB 15480057<00:01]\n# writing s2a-dim64-ttsr-valfix/train-000388.tar.gz 400 0.0 GB 155200\n# writing s2a-dim64-ttsr-valfix/train-000389.tar.gz 400 0.0 GB 15560057<00:01]\n# writing s2a-dim64-ttsr-valfix/train-000390.tar.gz 400 0.0 GB 15600058<00:01]\n# writing s2a-dim64-ttsr-valfix/train-000391.tar.gz 400 0.0 GB 15640058<00:01]\n# writing s2a-dim64-ttsr-valfix/train-000392.tar.gz 400 0.0 GB 156800\n# writing s2a-dim64-ttsr-valfix/train-000393.tar.gz 400 0.0 GB 15720058<00:01]\n# writing s2a-dim64-ttsr-valfix/train-000394.tar.gz 400 0.0 GB 15760058<00:00]\n# writing s2a-dim64-ttsr-valfix/train-000395.tar.gz 400 0.0 GB 15800058<00:00]\n# writing s2a-dim64-ttsr-valfix/train-000396.tar.gz 400 0.0 GB 158400\n# writing s2a-dim64-ttsr-valfix/train-000397.tar.gz 400 0.0 GB 15880058<00:00]\n# writing s2a-dim64-ttsr-valfix/train-000398.tar.gz 400 0.0 GB 15920059<00:00]\n# writing s2a-dim64-ttsr-valfix/train-000399.tar.gz 400 0.0 GB 15960059<00:00]\n |████████████████████████████████████████| 100.00% [160350/160350 00:59<00:00]" + }, + { + "objectID": "B2. Training (Lightning).html", + "href": "B2. 
Training (Lightning).html", + "title": "WhisperSpeech", + "section": "", + "text": "def test_fun(a:str=None, to:int = 2, toggle:bool=True):\n assert(a is not None)\n print(a, to, toggle)\nparse_and_call(\"test\", test_fun, [\"--to\", \"4\"], dict(a=[]), log_to_wandb=False)\n\n[] 4 False\n\n\n\nfrom fastcore.script import anno_parser\ndef test_fun(a:str=None, to:int = 2, toggle:bool=True):\n assert(a is not None)\n print(a, to, toggle)\ntest_fun(\"a\")\nanno_parser(test_fun).parse_args([])\n\na 2 True\n\n\nNamespace(a=None, to=2, toggle=False, pdb=False, xtra=None)\n\n\n\ndef test_fun2(a:str, to:int = 2):\n assert(a is not None)\n print(a, to)\n\nparse_and_call(\"test\", test_fun2, [\"qwe\"], log_to_wandb=False)\n\nqwe 2" + }, + { + "objectID": "3b. semantic token extraction.html", + "href": "3b. semantic token extraction.html", + "title": "Semantic token extraction", + "section": "", + "text": "vq_model = vq_stoks.RQBottleneckTransformer.load_model(\"vqmodel-medium-en+pl-512c-dim64.model\").cuda()\n\n\nvq_model.ensure_whisper('cuda')\n\n\nvq_model.whmodel[0].encoder\n\nAudioEncoder(\n (conv1): Conv1d(80, 1024, kernel_size=(3,), stride=(1,), padding=(1,))\n (conv2): Conv1d(1024, 1024, kernel_size=(3,), stride=(2,), padding=(1,))\n (blocks): ModuleList(\n (0-23): 24 x ResidualAttentionBlock(\n (attn): MultiHeadAttention(\n (query): Linear(in_features=1024, out_features=1024, bias=True)\n (key): Linear(in_features=1024, out_features=1024, bias=False)\n (value): Linear(in_features=1024, out_features=1024, bias=True)\n (out): Linear(in_features=1024, out_features=1024, bias=True)\n )\n (attn_ln): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)\n (mlp): Sequential(\n (0): Linear(in_features=1024, out_features=4096, bias=True)\n (1): GELU(approximate='none')\n (2): Linear(in_features=4096, out_features=1024, bias=True)\n )\n (mlp_ln): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)\n )\n )\n (ln_post): LayerNorm((1024,), eps=1e-05, 
elementwise_affine=True)\n)\n\n\n\n\n\nAutomatic pdb calling has been turned ON\n\n\n\nprepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)\n\n\n\n\n\n\n \n \n 100.00% [64/64 00:23<00:00]\n \n \n\n\n\nprepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=16)\n\n\n\n\n\n\n \n \n 100.00% [64/64 00:21<00:00]\n \n \n\n\n\nprepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=32)\n\n\n\n\n\n\n \n \n 100.00% [32/32 00:21<00:00]\n \n \n\n\n\nprepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=64)\n\n\n\n\n\n\n \n \n 100.00% [16/16 00:20<00:00]\n \n \n\n\n\nprepare_stoks('../wolnelektury-wds2/wolnelektury-audio-000000.tar', n_samples=1024, batch_size=64)\n\n\n\n\n\n\n \n \n 100.00% [16/16 00:23<00:00]\n \n \n\n\n\n!ls -lh ../wolnelektury-wds2/wolnelektury-maxvad-stoks-000000.tar\n!tar -tf ../wolnelektury-wds2/wolnelektury-maxvad-stoks-000000.tar" + }, + { + "objectID": "D. Common dataset utilities.html", + "href": "D. 
Common dataset utilities.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\nshard_glob\n\n shard_glob (input)\n\n\nshard_glob('../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-*.tar.gz')\n\n['../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000000.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000006.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000004.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000001.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000003.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000002.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000005.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000007.tar.gz']\n\n\n\n# \nshard_glob(Path('../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-*.tar.gz'))\n\n['../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000000.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000006.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000004.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000001.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000003.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000002.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000005.tar.gz',\n '../librilight/librilight-atoks-txts/librilight-small-atoks-3kbps-000007.tar.gz']\n\n\n\n# we can also specify the range and generate shard 
URLs\nshard_glob(Path('https://huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-{000000..000007}.tar.gz'))\n\n['https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000000.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000001.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000002.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000003.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000004.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000005.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000006.tar.gz',\n 'https:/huggingface.co/datasets/collabora/librilight-processed-webdataset/resolve/main/librilight-small-atoks-3kbps-000007.tar.gz']\n\n\n\nsource\n\n\njoin_datasets\n\n join_datasets (datasets)\n\nAn iterable Dataset.\nAll datasets that represent an iterable of data samples should subclass it. Such form of datasets is particularly useful when data come from a stream.\nAll subclasses should overwrite :meth:__iter__, which would return an iterator of samples in this dataset.\nWhen a subclass is used with :class:~torch.utils.data.DataLoader, each item in the dataset will be yielded from the :class:~torch.utils.data.DataLoader iterator. When :attr:num_workers > 0, each worker process will have a different copy of the dataset object, so it is often desired to configure each copy independently to avoid having duplicate data returned from the workers. 
:func:~torch.utils.data.get_worker_info, when called in a worker process, returns information about the worker. It can be used in either the dataset’s :meth:__iter__ method or the :class:~torch.utils.data.DataLoader ’s :attr:worker_init_fn option to modify each copy’s behavior.\nExample 1: splitting workload across all workers in :meth:__iter__::\n>>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DATALOADER)\n>>> # xdoctest: +SKIP(\"Fails on MacOS12\")\n>>> class MyIterableDataset(torch.utils.data.IterableDataset):\n... def __init__(self, start, end):\n... super(MyIterableDataset).__init__()\n... assert end > start, \"this example code only works with end >= start\"\n... self.start = start\n... self.end = end\n...\n... def __iter__(self):\n... worker_info = torch.utils.data.get_worker_info()\n... if worker_info is None: # single-process data loading, return the full iterator\n... iter_start = self.start\n... iter_end = self.end\n... else: # in a worker process\n... # split workload\n... per_worker = int(math.ceil((self.end - self.start) / float(worker_info.num_workers)))\n... worker_id = worker_info.id\n... iter_start = self.start + worker_id * per_worker\n... iter_end = min(iter_start + per_worker, self.end)\n... return iter(range(iter_start, iter_end))\n...\n>>> # should give same set of data as range(3, 7), i.e., [3, 4, 5, 6].\n>>> ds = MyIterableDataset(start=3, end=7)\n\n>>> # Single-process loading\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=0)))\n[tensor([3]), tensor([4]), tensor([5]), tensor([6])]\n\n>>> # xdoctest: +REQUIRES(POSIX)\n>>> # Mult-process loading with two worker processes\n>>> # Worker 0 fetched [3, 4]. 
Worker 1 fetched [5, 6].\n>>> # xdoctest: +IGNORE_WANT(\"non deterministic\")\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2)))\n[tensor([3]), tensor([5]), tensor([4]), tensor([6])]\n\n>>> # With even more workers\n>>> # xdoctest: +IGNORE_WANT(\"non deterministic\")\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=12)))\n[tensor([3]), tensor([5]), tensor([4]), tensor([6])]\nExample 2: splitting workload across all workers using :attr:worker_init_fn::\n>>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_DATALOADER)\n>>> class MyIterableDataset(torch.utils.data.IterableDataset):\n... def __init__(self, start, end):\n... super(MyIterableDataset).__init__()\n... assert end > start, \"this example code only works with end >= start\"\n... self.start = start\n... self.end = end\n...\n... def __iter__(self):\n... return iter(range(self.start, self.end))\n...\n>>> # should give same set of data as range(3, 7), i.e., [3, 4, 5, 6].\n>>> ds = MyIterableDataset(start=3, end=7)\n\n>>> # Single-process loading\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=0)))\n[3, 4, 5, 6]\n>>>\n>>> # Directly doing multi-process loading yields duplicate data\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2)))\n[3, 3, 4, 4, 5, 5, 6, 6]\n\n>>> # Define a `worker_init_fn` that configures each dataset copy differently\n>>> def worker_init_fn(worker_id):\n... worker_info = torch.utils.data.get_worker_info()\n... dataset = worker_info.dataset # the dataset copy in this worker process\n... overall_start = dataset.start\n... overall_end = dataset.end\n... # configure the dataset to only process the split workload\n... per_worker = int(math.ceil((overall_end - overall_start) / float(worker_info.num_workers)))\n... worker_id = worker_info.id\n... dataset.start = overall_start + worker_id * per_worker\n... 
dataset.end = min(dataset.start + per_worker, overall_end)\n...\n\n>>> # Mult-process loading with the custom `worker_init_fn`\n>>> # Worker 0 fetched [3, 4]. Worker 1 fetched [5, 6].\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=2, worker_init_fn=worker_init_fn)))\n[3, 5, 4, 6]\n\n>>> # With even more workers\n>>> print(list(torch.utils.data.DataLoader(ds, num_workers=12, worker_init_fn=worker_init_fn)))\n[3, 4, 5, 6]\n\n# validate that we don't reset the datasets on each `iter`\n# this is important with webdatasets since sample shuffling is very bad initially, unless num_workers << num_shards\nfrom itertools import islice\nds = join_datasets([\"ABCDEFG\"])\nfor x in islice(ds, 3):\n print(x)\nfor x in islice(ds, 5):\n print(x)\n\nA\nB\nC\nD\nE\nF\nG\n\n\n\n# will stop as soon as it exhausts one iterator\nfor x in join_datasets(['ABCDEFG', 'abcdefg', range(20)]):\n print(x)\n\n0\na\n1\n2\n3\nA\n4\n5\nb\nB\nc\nC\nD\nE\n6\nd\ne\n7\nF\nf\ng\n8\nG\n9\n\n\n\nsource\n\n\nresampler\n\n resampler (newsr=24000, key='samples_24k')\n\n\nsource\n\n\nderived_name\n\n derived_name (input, kind, base='audio', suffix='.gz', dir=None)\n\n\nsource\n\n\nderived_dataset\n\n derived_dataset (kind, base='audio', suffix='.gz', decoders=[], dir=None)\n\n\nsource\n\n\nmerge_in\n\n merge_in (dataset_fun)\n\nMerge a dataset into the current one returning samples with the union of keys. Pass in a function that takes a URL of a sample and returns a dataset for it (called everytime the URL changes).\nIt requires (and validates) that both datasets have the same ordering of keys so you have to use it before any sample shuffling. Shard shuffling is ok.\n\nsource\n\n\nAtomicTarWriter\n\n AtomicTarWriter (name, throwaway=False)\n\n\nsource\n\n\nreadlines\n\n readlines (fname)" + }, + { + "objectID": "7. Pipeline.html", + "href": "7. 
Pipeline.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\nPipeline\n\n Pipeline (t2s_ref=None, s2a_ref=None, optimize=True, torch_compile=False,\n device=None)\n\nInitialize self. See help(type(self)) for accurate signature." + }, + { + "objectID": "2c. whisper quantization (semantic token) evaluation.html", + "href": "2c. whisper quantization (semantic token) evaluation.html", + "title": "VQ semantic token extraction evaluation", + "section": "", + "text": "import io\nimport time\nimport torch\nimport torchaudio\nfrom pathlib import Path\nimport json\nfrom fastprogress import progress_bar, master_bar\nimport fastprogress\nimport numpy as np\nimport pylab as plt\nimport pandas as pd\nimport random\nimport IPython\n\nimport whisper\n\nfrom fastcore.script import *\nfrom whisperspeech.wer_metrics import *" + }, + { + "objectID": "2c. whisper quantization (semantic token) evaluation.html#how-whisper-works-with-speech-cut-at-different-lengths", + "href": "2c. whisper quantization (semantic token) evaluation.html#how-whisper-works-with-speech-cut-at-different-lengths", + "title": "VQ semantic token extraction evaluation", + "section": "How Whisper works with speech cut at different lengths", + "text": "How Whisper works with speech cut at different lengths\n\ndef test_incremental(model_name, Tmax=15):\n whmodel = whisper.load_model(model_name)\n for i in range(Tmax):\n print(i, whmodel.transcribe(snd[0,:int(i*16000)])['text'])\n\n\ntest_incremental('tiny.en')\n\n0 \n1 Chapter\n2 Chapter 5 of the\n3 Chapter 5 of the things in our garden.\n4 Chapter 5 of the Things in Our Garden by Arthur Rachael.\n5 Chapter 5 of the things in our garden by Arthur Ransom.\n6 Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox Recordings.\n7 Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public.\n8 Chapter 5 of the Things in Our Garden by Arthur Ransom. 
This LibraVox recording is in the public domain.\n9 Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5\n10 Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5, their own garden.\n11 Chapter 5 of the Things in Our Garden by Arthur Ransom. This LibraVox recording is in the public domain. Chapter 5, Their Own Gardens.\n12 Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra-Vox recording is in the public domain. Chapter 5, Their Own Gardens.\n13 Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra-Vox recording is in the public domain. Chapter 5, their own gardens, close by the wood at the\n14 Chapter 5 of the Things in Our Garden by Arthur Ransom. This Libra box recording is in the public domain. Chapter 5, Their Own Gardens, Close by the wood at the bottom of the garden.\n\n\n\ntest_incremental('base.en')\n\n0 \n1 Chapter 4\n2 Chapter 5 of the\n3 Chapter 5 of the Things in our Guard.\n4 Chapter 5 of The Things in Our Garden by Arthur Raffy\n5 Chapter 5 of The Things in Our Garden by Arthur Ransom.\n6 Chapter 5 of The Things in Our Garden by Arthur Ransom.\n7 CHAPTER V.\n8 CHAPTER V.\n9 CHAPTER V.\n10 CHAPTER V.\n11 CHAPTER V.\n12 CHAPTER V. Their Own Gardens.\n13 CHAPTER V. Their Own Gardens.\n14 CHAPTER V.\n\n\n\ntest_incremental('large-v2')\n\n0 \n1 Chapter 4.\n2 Chapter 5 of the\n3 Chapter 5 of The Things in Our Garden\n4 V. THE THINGS IN OUR GARDEN\n5 V. THE THINGS IN OUR GARDEN.\n6 CHAPTER V\n7 V. THE THINGS IN OUR GARDEN\n8 CHAPTER V\n9 CHAPTER V\n10 V. THEIR OWN GARDEN\n11 V. THEIR OWN GARDENS\n12 V. THEIR OWN GARDENS\n13 V. THEIR OWN GARDENS CLOSE BY THE WOOD\n14 V. THEIR OWN GARDENS CLOSE BY THE WOOD AT THE BOTTOM OF THE GARDEN" + }, + { + "objectID": "2c. whisper quantization (semantic token) evaluation.html#entropy-of-the-token-stream", + "href": "2c. 
whisper quantization (semantic token) evaluation.html#entropy-of-the-token-stream", + "title": "VQ semantic token extraction evaluation", + "section": "Entropy of the token stream", + "text": "Entropy of the token stream\n\nfrom whisperspeech.vq_stoks import RQBottleneckTransformer\n\n\nimport collections\ndef calc_model_entropy(ds, modelfile):\n vqmodel = RQBottleneckTransformer.load_model(local_filename=modelfile).cuda()\n cnts = collections.Counter()\n for snd,txt in ds:\n stoks = vqmodel.encode_audio(snd.cuda())\n cnts.update(stoks[0].tolist())\n pdf = torch.tensor([cnts[i] for i in range(max(cnts)+1)])\n pdf = pdf / pdf.sum()\n return -torch.nansum(pdf * np.log2(pdf))\n\n\n# the original semantic token model from early 2023\ncalc_model_entropy(make_test_ds(), None)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n6.097853445304322\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-ce9.2.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n6.357563112144668\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-256c.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n3.0997004132066834\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-256c-cosine.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n5.6921860685011225\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-2d-256c.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n2.899952018598168\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-2d-256c-cosine.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n5.769594466589709\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-2d-256c-cosine-padfix2.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n7.741530540488036\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n8.164144580014993\n\n\n\ncalc_model_entropy(make_test_ds(), 
\"vq-2d-512c-cosine32-padfix-premlp-learnpos-5e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:04<00:00]\n \n \n\n\n11.37221612373814\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:08<00:00]\n \n \n\n\n11.240560444030649\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:17<00:00]\n \n \n\n\n/tmp/ipykernel_276/107266959.py:11: RuntimeWarning: divide by zero encountered in log2\n return -torch.nansum(pdf * np.log2(pdf))\n\n\ntensor(9.6971)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:06<00:00]\n \n \n\n\n/tmp/ipykernel_276/107266959.py:11: RuntimeWarning: divide by zero encountered in log2\n return -torch.nansum(pdf * np.log2(pdf))\n\n\ntensor(11.4108)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:17<00:00]\n \n \n\n\n/tmp/ipykernel_103351/107266959.py:11: RuntimeWarning: divide by zero encountered in log2\n return -torch.nansum(pdf * np.log2(pdf))\n\n\ntensor(9.9410)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:17<00:00]\n \n \n\n\n/tmp/ipykernel_9385/107266959.py:11: RuntimeWarning: divide by zero encountered in log2\n return -torch.nansum(pdf * np.log2(pdf))\n\n\ntensor(11.2880)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-60k.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:20<00:00]\n \n \n\n\ntensor(11.4831)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vq-base.en-2d-4096c-60k.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:20<00:00]\n \n \n\n\ntensor(11.4831)\n\n\n\n# 4096 tokens, 
we later found out that tokens from this model do carry speaker information\ncalc_model_entropy(make_test_ds(), \"vqmodel-4e-hyptuned-32gpu.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:05<00:00]\n \n \n\n\ntensor(11.6404)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vqmodel-256c-4e-hyptuned-32gpu.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:05<00:00]\n \n \n\n\ntensor(8.7963)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vqmodel-256c-dim64-4e-hyptuned-32gpu.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:14<00:00]\n \n \n\n\ntensor(8.7499)\n\n\n\ncalc_model_entropy(make_test_ds(), \"vqmodel-base-en+pl-512c-dim64.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:14<00:00]\n \n \n\n\ntensor(8.3956)\n\n\n\n# the final model\ncalc_model_entropy(make_test_ds(), \"vqmodel-medium-en+pl-512c-dim64.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 00:45<00:00]\n \n \n\n\ntensor(8.4314)" + }, + { + "objectID": "2c. whisper quantization (semantic token) evaluation.html#word-error-rate-measurements", + "href": "2c. 
whisper quantization (semantic token) evaluation.html#word-error-rate-measurements", + "title": "VQ semantic token extraction evaluation", + "section": "Word Error Rate measurements", + "text": "Word Error Rate measurements\n\nfrom whisperspeech.wer_metrics import *\n\n\nVanilla Whisper models\n\ndef test_wh_model(whmodel):\n decoding_options=whisper.DecodingOptions(language='en')\n stats = WERStats()\n for snd, gt_text in progress_bar(librispeech_data('/data/LibriSpeech/test-clean'), total=1000):\n text = whmodel.decode(whisper.log_mel_spectrogram(whisper.pad_or_trim(snd[0])).cuda(), decoding_options).text\n diff = stats.push_sample(snd, gt_text, text)\n last_diff = diff.alignments[0][-1]\n stats.push(hallucination = last_diff.type == 'insert' and last_diff.hyp_end_idx - last_diff.hyp_start_idx > 3)\n stats = stats.df().sort_values('wer')\n print(f\"WER: {stats.wer.mean()*100:.2f}%\")\n print(f\"WER (w/o hallucinations): {stats[~stats['hallucination']].wer.mean()*100:.2f}%\")\n return stats\n\n\ntest_wh_model(whisper.load_model('tiny.en'))\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:05<00:00]\n \n \n\n\nWER: 6.91%\nWER (w/o hallucinations): 6.91%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n355\n2.885\nNone\nI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ\nI'm afraid I don't know much about the land of...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n353\n5.870\nNone\nTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...\nThe first lot we tested on our glass cat, whic...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n674\n2.295\nNone\nHE ONLY SHOOK HIS HEAD\nHe only shook his head.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n675\n11.545\nNone\nWELL BUT NOW SAID THE PRINCESS AND SHE FILLED ...\nWell, but now said the princess, and she 
fille...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n524\n3.195\nNone\nBROTHER MAC ARDLE BROTHER KEOGH\nBrother Maccardo, Brother Keoff.\n0.600000\n0.600000\n0.800000\n0.200000\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHans Stairz-Nied.\n0.666667\n0.666667\n0.888889\n0.111111\nFalse\n\n\n820\n2.155\nNone\nTHE FORMER BOOLOOROO GROANED\nThe former Billie Rook-Round\n0.750000\n0.600000\n0.800000\n0.200000\nFalse\n\n\n918\n3.000\nNone\nTHAT IS TRUE BADAUDERIE\nThat is true bad dealt gree.\n0.750000\n0.500000\n0.625000\n0.375000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCause A was my man's servant.\n1.250000\n0.714286\n0.857143\n0.142857\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_wh_model(whisper.load_model('base.en'))\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:41<00:00]\n \n \n\n\nWER: 5.08%\nWER (w/o hallucinations): 5.08%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_text\ntext\nwer\nhallucination\n\n\n\n\n0\n8.230\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said while on her lap ...\n0.000000\nFalse\n\n\n403\n5.370\nDEPARTING FROM FIVE HUNDRED THOUSAND THROATS T...\nDeparting from 500,000 throats, three cheers b...\n0.000000\nFalse\n\n\n404\n13.140\nTHOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE T...\nThousands of handkerchiefs were waving above t...\n0.000000\nFalse\n\n\n405\n2.695\nIT'S ALMOST BEYOND CONJECTURE\nIt's almost beyond conjecture.\n0.000000\nFalse\n\n\n406\n7.805\nTHIS REALITY BEGINS TO EXPLAIN THE DARK POWER ...\nThis reality begins to explain the dark power ...\n0.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n\n\n524\n3.195\nBROTHER MAC ARDLE BROTHER KEOGH\nBrother McCartill, Brother Kiaff.\n0.600000\nFalse\n\n\n592\n1.805\nHANS STIRS NOT\nHans Sturznide.\n0.666667\nFalse\n\n\n918\n3.000\nTHAT IS TRUE BADAUDERIE\nThat is true, bad girl degree.\n0.750000\nFalse\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCas← was my man's 
servant.\n1.000000\nFalse\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.500000\nFalse\n\n\n\n\n1000 rows ᅲ 5 columns\n\n\n\n\n\ntest_wh_model(whisper.load_model('small.en'))\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 02:53<00:00]\n \n \n\n\nWER: 3.89%\nWER (w/o hallucinations): 3.84%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n789\n5.945\nNone\nAND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTR...\nand this plan was adopted too in order to extr...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n461\n10.980\nNone\nSHE MEANWHILE PASSED HER LIFE WITH HER PARENTS...\nShe, meanwhile, passed her life with her paren...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n464\n8.845\nNone\nONE DAY WHEN THE BOY WAS SENT BY HIS GRANDFATH...\nOne day when the boy was sent by his grandfath...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n465\n8.785\nNone\nTHE BED SHE TOO WELL REMEMBERED WAS THERE AND ...\nThe bed she too well remembered was there, and...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n524\n3.195\nNone\nBROTHER MAC ARDLE BROTHER KEOGH\nBrother McCardle. 
Brother Kiyof.\n0.600000\n0.600000\n0.800000\n0.200000\nFalse\n\n\n288\n1.905\nNone\nI DELIGHT IN YOUR KITCHEN\nby delighting your kitchen.\n0.600000\n0.600000\n0.800000\n0.200000\nFalse\n\n\n121\n15.270\nNone\nAT LAST THE LITTLE MICE STAYED AWAY ALSO AND T...\nAt last the little mice stayed away also, and ...\n0.636364\n0.636364\n0.636364\n0.363636\nFalse\n\n\n918\n3.000\nNone\nTHAT IS TRUE BADAUDERIE\nThat is true Bad Delt Grey.\n0.750000\n0.500000\n0.625000\n0.375000\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedtlos\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_wh_model(whisper.load_model('medium.en'))\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 06:22<00:00]\n \n \n\n\nWER: 4.19%\nWER (w/o hallucinations): 3.19%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_text\ntext\nwer\nhallucination\n\n\n\n\n386\n5.915\nYES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...\nYes, we are, certainly, I replied evasively, b...\n0.00\nFalse\n\n\n507\n6.480\nHIS CONDUCT AND PRESENCE OF MIND IN THIS EMERG...\nHis conduct and presence of mind in this emerg...\n0.00\nFalse\n\n\n865\n4.315\nTHEIR SUFFERINGS HAVE NEVER YET BEEN FITLY CHR...\nTheir sufferings have never yet been fitly chr...\n0.00\nFalse\n\n\n509\n13.610\nFROM THE SAME MEN NEW REGIMENTS AND NEW COMPAN...\nFrom the same men new regiments and new compan...\n0.00\nFalse\n\n\n511\n12.655\nTHOUGH THE DISCIPLINE OF THE FORMER PARLIAMENT...\nThough the discipline of the former parliament...\n0.00\nFalse\n\n\n...\n...\n...\n...\n...\n...\n\n\n782\n2.260\nTO DAY I SHOUTED\nToday, I shouted.\n0.50\nFalse\n\n\n524\n3.195\nBROTHER MAC ARDLE BROTHER KEOGH\nBrother McCardle, Brother Kiyof.\n0.60\nFalse\n\n\n918\n3.000\nTHAT IS TRUE BADAUDERIE\nThat is true bad health grief.\n0.75\nFalse\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefanos Daedalus\n1.00\nFalse\n\n\n226\n6.750\nHE CONTINUED HIS PRETENDED SEARCH AND TO GIVE ...\nHe continued his pretended search, and to give...\n9.80\nTrue\n\n\n\n\n1000 
rows × 5 columns\n\n\n\n\n\ntest_wh_model(whisper.load_model('large-v2'))\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 07:39<00:00]\n \n \n\n\nWER: 6.07%\nWER (w/o hallucinations): 3.19%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said while on her lap ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n606\n2.610\nNone\nWE SUFFER STIFLING PAINS\nWe suffer stifling pains.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n607\n7.040\nNone\nSATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL...\nSaturday, August 15th. The sea unbroken all ro...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n608\n3.070\nNone\nTHE HORIZON SEEMS EXTREMELY DISTANT\nThe horizon seems extremely distant.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n609\n9.985\nNone\nALL MY DANGER AND SUFFERINGS WERE NEEDED TO ST...\nAll my danger and sufferings were needed to st...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHans Sturznott\n0.666667\n0.666667\n0.833333\n0.166667\nFalse\n\n\n95\n8.800\nNone\nTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...\nthought the fir tree, and believed it all, bec...\n4.285714\n0.810811\n0.810811\n0.189189\nTrue\n\n\n902\n7.370\nNone\nI HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...\nI had a name, I believe, in my young days, but...\n7.476190\n0.882022\n0.882022\n0.117978\nTrue\n\n\n610\n7.370\nNone\nYOU SEEM ANXIOUS MY UNCLE I SAID SEEING HIM CO...\n\"'You seem anxious, my uncle,' I said, seeing ...\n7.823529\n0.886667\n0.886667\n0.113333\nTrue\n\n\n438\n6.665\nNone\nAS TO HIS AGE AND ALSO THE NAME OF HIS MASTER ...\nAs to his age, and also the name of his master...\n8.631579\n0.896175\n0.896175\n0.103825\nTrue\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\n\nQuantized Whisper models\n\ndef test_model(modelfile, N=1000):\n vqmodel = 
RQBottleneckTransformer.load_model(local_filename=modelfile).cuda()\n stats = WERStats()\n for snd, gt_text in progress_bar(librispeech_data('/data/LibriSpeech/test-clean'), total=N):\n stoks = vqmodel.encode_audio(snd.cuda())\n text = vqmodel.decode_text(stoks[0])[0].text\n diff = stats.push_sample(snd, gt_text, text)\n last_diff = diff.alignments[0][-1]\n stats.push(hallucination = last_diff.type == 'insert' and last_diff.hyp_end_idx - last_diff.hyp_start_idx > 3)\n stats = stats.df().sort_values('wer')\n print(f\"WER: {stats.wer.mean()*100:.2f}%\")\n print(f\"WER (w/o hallucinations): {stats[~stats['hallucination']].wer.mean()*100:.2f}%\")\n return stats\n\n\ntest_model(None) # the old stoks model from early 2023\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:10<00:00]\n \n \n\n\nWER: 16.06%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n207\n4.075\nSEVERAL HUNDRED FREE STATE MEN PROMPTLY RESPON...\nseveral hundred free state men promptly respon...\n0.000000\n\n\n209\n5.295\nTHE LEADERS OF THE CONSPIRACY BECAME DISTRUSTF...\nThe leaders of the conspiracy became distrustf...\n0.000000\n\n\n709\n2.440\nTHE THREE MODES OF MANAGEMENT\nThe three modes of management.\n0.000000\n\n\n708\n13.020\nTHE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY...\nThe pain produced by an act of hasty and angry...\n0.000000\n\n\n705\n5.250\nTHEY ARE CHIEFLY FORMED FROM COMBINATIONS OF T...\nThey are chiefly formed from combinations of t...\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCOSA was my man's servant.\n1.000000\n\n\n144\n4.680\nAND BESIDES SUPPOSE THEE DOES LEARN MEDICINE\nand be sides, supposed to be lost, Lord medicine.\n1.000000\n\n\n907\n4.195\nMADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIES\nMadam Gwen-Saun, besides Ken Sir Ian Corrie's.\n1.142857\n\n\n187\n2.230\nNO ITS NOT TOO SOON\nKnow what's sought to assume.\n1.200000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephano's Nerdos.\n1.500000\n\n\n\n\n1000 rows × 4 
columns\n\n\n\n\n\ntest_model('vq-ce9.2.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:12<00:00]\n \n \n\n\nWER: 8.80%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n0\n8.230\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n\n\n283\n1.420\nDIRECTION\ndirection.\n0.000000\n\n\n282\n2.385\nI DIDN'T PREACH WITHOUT DIRECTION\nI didn't preach without direction.\n0.000000\n\n\n624\n3.975\nI SHUDDER AS I RECALL THESE MONSTERS TO MY REM...\nI shudder as I recall these monsters to my rem...\n0.000000\n\n\n279\n10.490\nWE CAN ALL BE SERVANTS OF GOD WHEREVER OUR LOT...\nWe can all be servants of God, wherever our lo...\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n820\n2.155\nTHE FORMER BOOLOOROO GROANED\nthe former Boula Rook round.\n0.750000\n\n\n918\n3.000\nTHAT IS TRUE BADAUDERIE\nThat is true, bad, old-gree.\n0.750000\n\n\n105\n6.555\nIF IT ONLY WERE NOT SO DARK HERE AND SO TERRIB...\nIf... ... ... ... ... ... ... ... ... ... ... ...\n0.916667\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nJose was my man's servant.\n1.000000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefanos de los\n1.500000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model('vq-256c.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:13<00:00]\n \n \n\n\nWER: 10.26%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n789\n5.945\nAND THIS PLAN WAS ADOPTED TOO IN ORDER TO EXTR...\nAnd this plan was adopted too, in order to ext...\n0.000\n\n\n365\n5.780\nI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...\nI will show you what a good job I did. And she...\n0.000\n\n\n722\n10.720\nAS I SPOKE I MADE HIM A GRACIOUS BOW AND I THI...\nAs I spoke, I made him a gracious bow, and I t...\n0.000\n\n\n723\n7.840\nI HAVE COME TO YOUR SHORES MISTER PRESIDENT WI...\nI have come to your shores, Mr. 
President, wit...\n0.000\n\n\n362\n5.335\nSOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...\nSometimes it is called a crazy quilt because t...\n0.000\n\n\n...\n...\n...\n...\n...\n\n\n106\n2.020\nSQUEAK SQUEAK\nSquick. Squick.\n1.000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephanos de Arlos.\n1.000\n\n\n288\n1.905\nI DELIGHT IN YOUR KITCHEN\nI'd like to introduce you in your kitchen.\n1.000\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCall say, was my man servant?\n1.000\n\n\n381\n4.880\nCONSEIL I CALLED A THIRD TIME CONSEIL APPEARED\nCan't say, at call the third time. Can't say a...\n1.125\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model('vq-256c-cosine.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:10<00:00]\n \n \n\n\nWER: 10.24%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n710\n11.490\nTO SUPPOSE THAT THE OBJECT OF THIS WORK IS TO ...\nTo suppose that the object of this work is to ...\n0.000000\n\n\n629\n3.235\nTWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE ME\nTwo hours afterwards, a terrible shock awoke me.\n0.000000\n\n\n640\n1.740\nPOOR ALICE\nPoor Alice.\n0.000000\n\n\n262\n2.435\nTHAT'S WHAT YOU'D LIKE TO BE DOING IS IT\nThat's what you'd like to be doing, is it?\n0.000000\n\n\n644\n3.105\nAND YESTERDAY THINGS WENT ON JUST AS USUAL\nAnd yesterday, things went on just as usual.\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n187\n2.230\nNO ITS NOT TOO SOON\nNo, it's not just here.\n0.800000\n\n\n115\n4.470\nWHO IS HUMPY DUMPY ASKED THE MICE\nWho is a MP? Don't be. 
Ask the mice.\n0.857143\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCross say, was my man servant.\n1.000000\n\n\n106\n2.020\nSQUEAK SQUEAK\nquick, quick.\n1.000000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephenos der los.\n1.500000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model('vq-2d-256c.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:11<00:00]\n \n \n\n\nWER: 21.75%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n709\n2.440\nTHE THREE MODES OF MANAGEMENT\nThe Three Modes of Management\n0.000000\n\n\n419\n2.415\nFATHOM SIX FEET\nFathom six feet.\n0.000000\n\n\n703\n4.775\nNATURE OF THE EFFECT PRODUCED BY EARLY IMPRESS...\nnature of the effect produced by early impress...\n0.000000\n\n\n693\n2.110\nI AM VERY GLAD\nI am very glad.\n0.000000\n\n\n686\n2.740\nNO MY LITTLE SON SHE SAID\nNo, my little son, she said.\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n627\n3.060\nTUESDAY AUGUST EIGHTEENTH\n2. Day August 8th\n1.000000\n\n\n820\n2.155\nTHE FORMER BOOLOOROO GROANED\nThe former Bill of Rook around.\n1.000000\n\n\n28\n5.530\nKESWICK MARCH TWENTY SECOND EIGHTEEN THIRTY SE...\nYes, we wish between second 1837. 
Did you reme...\n1.333333\n\n\n106\n2.020\nSQUEAK SQUEAK\nQuick, quick, quick.\n1.500000\n\n\n792\n1.810\nVENICE\nThen Next\n2.000000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model('vq-2d-256c-cosine.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:11<00:00]\n \n \n\n\nWER: 11.61%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n686\n2.740\nNO MY LITTLE SON SHE SAID\nNo, my little son, she said.\n0.000000\n\n\n902\n7.370\nI HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...\nI had a name I believe in my young days, but I...\n0.000000\n\n\n904\n3.300\nYOU DO ME A GREAT HONOUR\nYou do me a great honor.\n0.000000\n\n\n228\n6.775\nAS HE HAD PROMISED TO PROTECT THE HOTEL THE RE...\nAs he had promised to protect the hotel, the r...\n0.000000\n\n\n521\n3.440\nSOON THE WHOLE BRIDGE WAS TREMBLING AND RESOUN...\nSoon the whole bridge was trembling and resoun...\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n918\n3.000\nTHAT IS TRUE BADAUDERIE\nThat is true, bad, old-gree.\n0.750000\n\n\n381\n4.880\nCONSEIL I CALLED A THIRD TIME CONSEIL APPEARED\nConse, at call to third town. Conse, appeared.\n0.750000\n\n\n115\n4.470\nWHO IS HUMPY DUMPY ASKED THE MICE\nWho eats umpi, don't pee? Ask the mice.\n0.857143\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephenau Stairlauce.\n1.000000\n\n\n106\n2.020\nSQUEAK SQUEAK\nSpeak. Speak. 
Speak.\n1.500000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\n# full crop\ntest_model('vq-2d-256c-cosine-padfix2.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:10<00:00]\n \n \n\n\nWER: 16.13%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n652\n3.475\nI AM SO VERY TIRED OF BEING ALL ALONE HERE\nI'm so very tired of being all alone here.\n0.000000\n\n\n906\n2.610\nAT YOUR SERVICE SIR\nAt your service, sir.\n0.000000\n\n\n904\n3.300\nYOU DO ME A GREAT HONOUR\nYou do me a great honor.\n0.000000\n\n\n902\n7.370\nI HAD A NAME I BELIEVE IN MY YOUNG DAYS BUT I ...\nI had a name I believe in my young days, but I...\n0.000000\n\n\n901\n2.755\nI NEVER HAD ANY FAMILY\nI never had any family.\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n448\n2.215\nWHO TOUCHES ME AM I IN BED\nLook at us, me, our young dad.\n1.000000\n\n\n934\n4.205\nI RESIDE IN THE MARAIS RUE DE DOUZE PORTES\nIrae's eye in the Ma'rae's crew did to support.\n1.111111\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStep 4, Zetelos.\n1.500000\n\n\n16\n1.695\nFAREWELL MADAM\nFair will, damn.\n1.500000\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCos they were my man's servant.\n1.500000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\n# no cropping\ntest_model('vq-2d-256c-cosine-padfix2.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:10<00:00]\n \n \n\n\nWER: 11.17%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n839\n2.275\nTHE CAPTAIN SHOOK HIS HEAD\nThe captain shook his head.\n0.000000\n\n\n408\n9.935\nNEMO BUILDS A FABULOUS FUTURISTIC SUBMARINE TH...\nNemo builds a fabulous futuristic submarine, t...\n0.000000\n\n\n405\n2.695\nIT'S ALMOST BEYOND CONJECTURE\nIt's almost beyond conjecture.\n0.000000\n\n\n404\n13.140\nTHOUSANDS OF HANDKERCHIEFS WERE WAVING ABOVE T...\nThousands of handkerchiefs were waving above t...\n0.000000\n\n\n790\n14.900\nBRIGHTER THAN EARLY DAWN'S MOST BRILLIANT DYE ...\nBrighter than early dawn's most brilliant dye ...\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n538\n2.215\nSTEPHANOS 
DEDALOS\nStephenos dellos\n1.000000\n\n\n592\n1.805\nHANS STIRS NOT\nHonsters nod.\n1.000000\n\n\n907\n4.195\nMADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIES\nMadam Quinsong, besides Cinanza, you're in que...\n1.000000\n\n\n115\n4.470\nWHO IS HUMPY DUMPY ASKED THE MICE\nPhew, he's on P, don't pee. Ask the mice.\n1.142857\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCos they were my man's servant.\n1.500000\n\n\n\n\n1000 rows ᅲ 4 columns\n\n\n\n\n\n# crop to 200 toks minimum\ntest_model('vq-2d-256c-cosine-padfix2.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:10<00:00]\n \n \n\n\nWER: 12.56%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n871\n2.920\nWHO BEGAN THE QUARREL WAS IT THE MORMONS\nWho began the quarrel? Was it the Mormons?\n0.000000\n\n\n938\n12.435\nHOW STRANGE IT SEEMED TO THE SAD WOMAN AS SHE ...\nHow strange it seemed to the sad woman, as she...\n0.000000\n\n\n937\n12.605\nHIS HOUSEKEEPER HAD THE MANAGEMENT OF EVERYTHI...\nHis housekeeper had the management of everythi...\n0.000000\n\n\n558\n15.720\nIT WAS STRANGE TOO THAT HE FOUND AN ARID PLEAS...\nIt was strange too, that he found an arid plea...\n0.000000\n\n\n305\n3.835\nTHE HEAD OF THE PATCHWORK GIRL WAS THE MOST CU...\nThe head of the patchwork girl was the most cu...\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephenos dellos\n1.000000\n\n\n907\n4.195\nMADAME QUINSON BESIDES CAN ANSWER YOUR ENQUIRIES\nMadam Quinsong, besides Cenanza, you're in que...\n1.000000\n\n\n106\n2.020\nSQUEAK SQUEAK\nQuick, quick.\n1.000000\n\n\n115\n4.470\nWHO IS HUMPY DUMPY ASKED THE MICE\nP-E-S-A-P, don't be... 
asked the mice.\n1.142857\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCos-A was my man's servant.\n1.250000\n\n\n\n\n1000 rows ᅲ 4 columns\n\n\n\n\n\n# crop to audio length\ntest_model('vq-2d-512c-cosine-padfix-premlp-learnpos.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:09<00:00]\n \n \n\n\nWER: 9.89%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n570\n2.715\nBEWARE OF MAKING THAT MISTAKE\nBeware of making that mistake.\n0.000000\n\n\n260\n3.155\nWHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE ...\nWho taught you to scrub a floor? I should like...\n0.000000\n\n\n800\n5.770\nOLD DANCES ARE SIMPLIFIED OF THEIR YEARNING BL...\nOld dances are simplified of their yearning, b...\n0.000000\n\n\n258\n2.260\nSPINNING INDEED\nSpinning Indeed.\n0.000000\n\n\n653\n3.815\nAND I DECLARE IT'S TOO BAD THAT IT IS\nAnd I declare it's too bad that it is.\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n934\n4.205\nI RESIDE IN THE MARAIS RUE DE DOUZE PORTES\nIries I'd in the Marfra Grudetus port.\n0.777778\n\n\n115\n4.470\nWHO IS HUMPY DUMPY ASKED THE MICE\nWho is a P-Don't Be? Ask the mice.\n0.857143\n\n\n448\n2.215\nWHO TOUCHES ME AM I IN BED\nPotatys me, and my embed.\n0.857143\n\n\n592\n1.805\nHANS STIRS NOT\nHan Stersnide\n1.000000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefanos de los\n1.500000\n\n\n\n\n1000 rows ᅲ 4 columns\n\n\n\n\n\n# crop to audio length\ntest_model('vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:09<00:00]\n \n \n\n\nWER: 9.51%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_texts\ntexts\nwers\n\n\n\n\n0\n8.230\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000\n\n\n607\n7.040\nSATURDAY AUGUST FIFTEENTH THE SEA UNBROKEN ALL...\nSaturday, August 15th. 
The sea unbroken all ro...\n0.000\n\n\n608\n3.070\nTHE HORIZON SEEMS EXTREMELY DISTANT\nThe horizon seems extremely distant.\n0.000\n\n\n615\n3.735\nTHEREFORE DON'T TALK TO ME ABOUT VIEWS AND PRO...\nTherefore, don't talk to me about views and pr...\n0.000\n\n\n616\n5.795\nI TAKE THIS AS MY ANSWER AND I LEAVE THE PROFE...\nI take this as my answer and I leave the profe...\n0.000\n\n\n...\n...\n...\n...\n...\n\n\n157\n3.830\nAND THEE WON'T GO WHY SHOULD I\nAnd, see you all next time!\n0.875\n\n\n381\n4.880\nCONSEIL I CALLED A THIRD TIME CONSEIL APPEARED\nCan say, at call the third time, can say appea...\n0.875\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCaus← was my man's servant.\n1.000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefanos dellos.\n1.000\n\n\n106\n2.020\nSQUEAK SQUEAK\nSweet, sweet.\n1.000\n\n\n\n\n1000 rows ᅲ 4 columns\n\n\n\n\n\n# crop to audio length\ntest_model('vq-2d-512c-cosine32-padfix-premlp-learnpos-5e.model')\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:08<00:00]\n \n \n\n\nWER: 9.84%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_text\ntext\nwer\n\n\n\n\n310\n4.040\nSHE POURED INTO THE DISH A QUANTITY FROM EACH ...\nShe poured into the dish a quantity from each ...\n0.0\n\n\n387\n2.735\nA ROUTE SLIGHTLY LESS DIRECT THAT'S ALL\na route slightly less direct, that's all.\n0.0\n\n\n385\n4.530\nANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WH...\nAnyhow, we'll leave instructions to ship the w...\n0.0\n\n\n742\n4.730\nWE SAT WITH THE OFFICERS SOME LITTLE TIME AFTE...\nWe sat with the officers some little time afte...\n0.0\n\n\n383\n9.300\nPACK AS MUCH INTO MY TRUNK AS YOU CAN MY TRAVE...\nPack as much into my trunk as you can. 
My trav...\n0.0\n\n\n...\n...\n...\n...\n...\n\n\n559\n13.895\nTHE SENTENCE OF SAINT JAMES WHICH SAYS THAT HE...\nThank you.\n1.0\n\n\n775\n5.545\nTHE PECULIAR CIRCUMSTANCES OF THE COLONY ARE W...\nThank you.\n1.0\n\n\n106\n2.020\nSQUEAK SQUEAK\nQuick, quick.\n1.0\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStephanos de los\n1.0\n\n\n491\n4.805\nTHE PARLIAMENT AND THE SCOTS LAID THEIR PROPOS...\nThank you.\n1.0\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:41<00:00]\n \n \n\n\nWER: 7.51%\n\n\n\n\n\n\n\n\n\n\nsecs\ngt_text\ntext\nwer\n\n\n\n\n862\n6.720\nTO THE FERVENT LATTER DAY SAINT A TEMPLE IS NO...\nTo the fervent Latter-day Saint, a temple is n...\n0.000000\n\n\n436\n6.380\nSHE WAS A LARGE HOMELY WOMAN THEY WERE COMMON ...\nShe was a large, homely woman. They were commo...\n0.000000\n\n\n437\n5.425\nSUBSTANTIALLY THIS WAS JACOB'S UNVARNISHED DES...\nSubstantially, this was Jacob's unvarnished de...\n0.000000\n\n\n438\n6.665\nAS TO HIS AGE AND ALSO THE NAME OF HIS MASTER ...\nAs to his age and also the name of his master,...\n0.000000\n\n\n439\n3.020\nOF STARTING I DIDN'T KNOW THE WAY TO COME\nof starting. I didn't know the way to come.\n0.000000\n\n\n...\n...\n...\n...\n...\n\n\n480\n12.510\nTHIS WAS DONE FOR THE EVENT TOOK PLACE AT A TI...\nThis was done for the event took place.\n0.783784\n\n\n713\n17.945\nTHE MOTHER AS SOON AS THE CHAISE IS SO FAR TUR...\nThe Mother. 
As soon as the chase\n0.869565\n\n\n454\n7.720\nAMONG OTHER THINGS ON WHICH SHE CAST HER EYES ...\nAmong other things...\n0.869565\n\n\n371\n2.440\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n\n\n538\n2.215\nSTEPHANOS DEDALOS\nStefano Staedt-Los\n1.500000\n\n\n\n\n1000 rows × 4 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:33<00:00]\n \n \n\n\nWER: 7.49%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n714\n8.010\nNone\nSO YOU WILL BE A GOOD GIRL I KNOW AND NOT MAKE...\nSo you will be a good girl, I know, and not ma...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n365\n5.780\nNone\nI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...\nI will show you what a good job I did, and she...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n608\n3.070\nNone\nTHE HORIZON SEEMS EXTREMELY DISTANT\nThe horizon seems extremely distant.\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n362\n5.335\nNone\nSOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...\nSometimes it is called a crazy quilt because t...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n361\n6.045\nNone\nA BED QUILT MADE OF PATCHES OF DIFFERENT KINDS...\nA bed quilt made of patches of different kinds...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n480\n12.510\nNone\nTHIS WAS DONE FOR THE EVENT TOOK PLACE AT A TI...\nThis was done for the event took place.\n0.783784\n0.783784\n0.783784\n0.216216\n\n\n454\n7.720\nNone\nAMONG OTHER THINGS ON WHICH SHE CAST HER EYES ...\nAmong other things...\n0.869565\n0.869565\n0.869565\n0.130435\n\n\n713\n17.945\nNone\nTHE MOTHER AS SOON AS THE CHAISE IS SO FAR TUR...\nThe Mother. 
As soon as the chase\n0.869565\n0.869565\n0.888199\n0.111801\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Los\n1.500000\n1.000000\n1.000000\n0.000000\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:31<00:00]\n \n \n\n\nWER: 10.44%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n669\n8.540\nNone\nAT THE FARTHER END OF THE LARGEST HALL A TABLE...\nAt the farther end of the largest hall, a tabl...\n0.0\n0.0\n0.0\n1.0\n\n\n349\n2.130\nNone\nTHE WOMAN SEEMED THOUGHTFUL\nThe woman seemed thoughtful.\n0.0\n0.0\n0.0\n1.0\n\n\n572\n4.090\nNone\nHE IS CALLED AS YOU KNOW THE APOSTLE OF THE IN...\nHe is called, as you know, the apostle of the ...\n0.0\n0.0\n0.0\n1.0\n\n\n347\n3.665\nNone\nOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...\nOjo had never eaten such a fine meal in all hi...\n0.0\n0.0\n0.0\n1.0\n\n\n346\n3.705\nNone\nAND YOU MUST BE OJO THE UNLUCKY SHE ADDED\nAnd you must be Ojo the unlucky,\" she added.\n0.0\n0.0\n0.0\n1.0\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n896\n18.540\nNone\nSILVIA WAS THE ADORATION OF FRANCE AND HER TAL...\nSylvia\n1.0\n1.0\n1.0\n0.0\n\n\n689\n5.995\nNone\nDELLA HAD A YOUNG SISTER NAMED MARIA AND A COU...\nDela.\n1.0\n1.0\n1.0\n0.0\n\n\n512\n27.525\nNone\nVALOR INDEED WAS VERY GENERALLY DIFFUSED OVER ...\nVala.\n1.0\n1.0\n1.0\n0.0\n\n\n897\n23.740\nNone\nSILVIA DID NOT THINK THAT HER GOOD CONDUCT WAS...\nSylvia.\n1.0\n1.0\n1.0\n0.0\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.5\n1.0\n1.0\n0.0\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:45<00:00]\n \n \n\n\nWER: 
6.64%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n377\n3.910\nNone\nHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...\nHe went here, there, and everywhere in perfect...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n376\n8.340\nNone\nNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...\nNever did he object to buckling up his suitcas...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n687\n8.500\nNone\nIF YOU DRESSED IN SILK AND GOLD FROM TOP TO TO...\nIf you dressed in silk and gold from top to to...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n688\n11.125\nNone\nTO SUCH PERSONS THESE INDIRECT MODES OF TRAINI...\nTo such persons, these indirect modes of train...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n918\n3.000\nNone\nTHAT IS TRUE BADAUDERIE\nThat is true bad-dulch-gree.\n0.750000\n0.500000\n0.625000\n0.375000\n\n\n46\n25.640\nNone\nA GOOD NEIGHBOUR OF THE BRONTES A CLEVER INTEL...\nA good neighbor of the Bronte's, a clever, int...\n0.797101\n0.785714\n0.836957\n0.163043\n\n\n221\n15.060\nNone\nIN THE SHOOTING OF SHERIFF JONES IN LAWRENCE A...\nIn the shooting of Sheriff's\n0.857143\n0.857143\n0.880952\n0.119048\n\n\n879\n17.840\nNone\nTHEY KNEW NO NORTH NO SOUTH NO EAST NO WEST TH...\nThey knew no North.\n0.925926\n0.925926\n0.925926\n0.074074\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedalus.\n1.000000\n1.000000\n1.000000\n0.000000\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\n_9.plot.scatter('secs', 'wer', alpha=.2)\n\n\n\n\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:36<00:00]\n \n \n\n\nWER: 6.34%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE 
ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n696\n18.415\nNone\nFOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLA...\nFor instance, one day the children had been pl...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n370\n2.340\nNone\nBUT NOW NOTHING COULD HOLD ME BACK\nBut now nothing could hold me back.\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n369\n9.340\nNone\nI WANTED NOTHING MORE THAN TO SEE MY COUNTRY A...\nI wanted nothing more than to see my country a...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n368\n6.190\nNone\nEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...\nEven so, I had just returned from an arduous j...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n820\n2.155\nNone\nTHE FORMER BOOLOOROO GROANED\nthe former Boula-Ri-Growned.\n0.750000\n0.600000\n0.800000\n0.200000\n\n\n843\n2.110\nNone\nFINE GLORIOUS\nFind. Chlorious.\n1.000000\n1.000000\n1.000000\n0.000000\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHon Stir's Night.\n1.333333\n1.000000\n1.000000\n0.000000\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.500000\n1.000000\n1.000000\n0.000000\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:30<00:00]\n \n \n\n\nWER: 10.00%\nWER (w/o hallucinations): 10.00%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n696\n18.415\nNone\nFOR INSTANCE ONE DAY THE CHILDREN HAD BEEN PLA...\nFor instance, one day the children had been pl...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n594\n4.865\nNone\nI REFER TO THE THERMOMETER IT INDICATES THE FI...\nI refer to the thermometer, it indicates the 
f...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n738\n8.105\nNone\nTHEN THERE WERE THREE OR FOUR LEADING MEN OF T...\nThen there were three or four leading men of t...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n355\n2.885\nNone\nI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ\nI'm afraid I don't know much about the land of...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n354\n9.840\nNone\nI THINK THE NEXT GLASS CAT THE MAGICIAN MAKES ...\nI think the next glass cat the magician makes ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n886\n22.095\nNone\nTHIS MEANT THAT FOR AN ALLEGED MISDEMEANOR FOR...\nThis is the end of the video.\n0.949153\n0.949153\n0.978208\n0.021792\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n85\n2.610\nNone\nTHIS EVENING THEY ALL SAID\nThis is the end of the video.\n1.200000\n0.857143\n0.971429\n0.028571\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n418\n17.640\nNone\nFOR MANY THEN THIS BOOK HAS BEEN A SOURCE OF F...\nFor many then, this is the end of the video. 
F...\n2.923077\n0.942149\n0.989616\n0.010384\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:28<00:00]\n \n \n\n\nWER: 7.82%\nWER (w/o hallucinations): 7.82%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n709\n2.440\nNone\nTHE THREE MODES OF MANAGEMENT\nthe three modes of management.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n740\n2.715\nNone\nBUT I MEAN TO HAVE MY INNINGS BEFORE LONG\nBut I mean to have my innings before long.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n362\n5.335\nNone\nSOMETIMES IT IS CALLED A CRAZY QUILT BECAUSE T...\nSometimes it is called a crazy quilt because t...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n361\n6.045\nNone\nA BED QUILT MADE OF PATCHES OF DIFFERENT KINDS...\nA bed quilt made of patches of different kinds...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n605\n6.305\nNone\nA SUFFOCATING SMELL OF NITROGEN FILLS THE AIR ...\nA suffocating smell of nitrogen fills the air....\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n793\n14.580\nNone\nIN A SUNSET GLOWING OF CRIMSON AND GOLD SHE LI...\nIn a sunset\n0.906250\n0.906250\n0.906250\n0.093750\nFalse\n\n\n170\n8.740\nNone\nRUTH WAS GLAD TO HEAR THAT PHILIP HAD MADE A P...\nRuth was\n0.931034\n0.931034\n0.931034\n0.068966\nFalse\n\n\n818\n9.870\nNone\nI'LL GLADLY DO THAT PROMISED THE NEW BOOLOOROO...\nI'll\n0.933333\n0.933333\n0.933333\n0.066667\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCosse was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 
columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro-warm1000.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:36<00:00]\n \n \n\n\nWER: 7.23%\nWER (w/o hallucinations): 7.23%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n760\n6.370\nNone\nTHERE CAME UPON ME A SUDDEN SHOCK WHEN I HEARD...\nThere came upon me a sudden shock when I heard...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n368\n6.190\nNone\nEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...\nEven so, I had just returned from an arduous j...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n586\n5.515\nNone\nTHERE'S A HEAVY STORM COMING ON I CRIED POINTI...\nThere's a heavy storm coming on, I cried, poin...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n366\n3.615\nNone\nCHAPTER THREE AS MASTER WISHES\nChapter 3 As Master Wishes\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n365\n5.780\nNone\nI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...\nI will show you what a good job I did, and she...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n694\n5.965\nNone\nI EXPECT YOU HAVE BEEN A VERY GOOD GIRL ANDELL...\nI\n0.933333\n0.933333\n0.933333\n0.066667\nFalse\n\n\n881\n13.950\nNone\nWE BELIEVE IN A LITERAL RESURRECTION AND AN AC...\nWe believe that we are the most important ones.\n0.944444\n0.944444\n0.987654\n0.012346\nFalse\n\n\n106\n2.020\nNone\nSQUEAK SQUEAK\nQuick, quick!\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano's dead loss.\n2.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-repro-warm1000-2.model\")\n\n\n\n\n\n\n \n \n 100.00% 
[1000/1000 01:35<00:00]\n \n \n\n\nWER: 6.47%\nWER (w/o hallucinations): 6.47%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n702\n14.175\nNone\nAND THIS METHOD OF TREATING THE CASE WAS MUCH ...\nAnd this method of treating the case was much ...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n703\n4.775\nNone\nNATURE OF THE EFFECT PRODUCED BY EARLY IMPRESS...\nNature of the Effect produced by Early Impress...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n377\n3.910\nNone\nHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...\nHe went here, there, and everywhere in perfect...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n376\n8.340\nNone\nNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...\nNever did he object to buckling up his suitcas...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossé was my man's servant.\n1.0\n0.666667\n0.833333\n0.166667\nFalse\n\n\n322\n3.200\nNone\nI NOW USE THEM AS ORNAMENTAL STATUARY IN MY GA...\nand\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n652\n3.475\nNone\nI AM SO VERY TIRED OF BEING ALL ALONE HERE\nand\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n555\n5.815\nNone\nBUT THE DUSK DEEPENING IN THE SCHOOLROOM COVER...\nand\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStaphano's dead loss.\n2.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:33<00:00]\n \n \n\n\nWER: 5.93%\nWER (w/o hallucinations): 5.93%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n781\n3.050\nNone\nWHEN DO YOU INTEND THAT 
THE JOHN BRIGHT SHALL ...\nWhen do you intend that the John Bright shall ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n388\n2.355\nNone\nWE'RE LEAVING ON THE ABRAHAM LINCOLN\nWe're leaving on the Abraham Lincoln.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n387\n2.735\nNone\nA ROUTE SLIGHTLY LESS DIRECT THAT'S ALL\na route slightly less direct. That's all.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n386\n5.915\nNone\nYES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...\nYes, we are. Certainly, I replied evasively, b...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n385\n4.530\nNone\nANYHOW WE'LL LEAVE INSTRUCTIONS TO SHIP THE WH...\nAnyhow, we'll leave instructions to ship the w...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n524\n3.195\nNone\nBROTHER MAC ARDLE BROTHER KEOGH\nBrother McCarle, Brother Kioff.\n0.600000\n0.600000\n0.800000\n0.200000\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHans-Stirrsnacht.\n0.666667\n0.666667\n0.833333\n0.166667\nFalse\n\n\n766\n2.540\nNone\nYOU PROPOSE TO KIDNAP ME I SAID\nYou proposed a kenatmi set.\n0.857143\n0.857143\n0.971429\n0.028571\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nSteffano Staedalus\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\nax = _14.plot.scatter('secs', 'wer', alpha=.2)\nax.set_ylim(0, 1.5)\n\n\n\n\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-60k.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:30<00:00]\n \n \n\n\nWER: 9.34%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n646\n3.385\nNone\nI ALMOST THINK I CAN REMEMBER FEELING A LITTLE...\nI almost think I can remember feeling a little...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n862\n6.720\nNone\nTO THE FERVENT LATTER DAY SAINT A TEMPLE IS NO...\nTo the fervent 
Latter-day Saint, a temple is n...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n370\n2.340\nNone\nBUT NOW NOTHING COULD HOLD ME BACK\nBut now nothing could hold me back.\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n369\n9.340\nNone\nI WANTED NOTHING MORE THAN TO SEE MY COUNTRY A...\nI wanted nothing more than to see my country a...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n368\n6.190\nNone\nEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...\nEven so, I had just returned from an arduous j...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n61\n10.250\nNone\nIN WINTER WHEN THE SNOW LAY GLITTERING ON THE ...\nIn winter, when the snow lay glittering on the...\n1.791667\n0.651515\n0.666035\n0.333965\n\n\n468\n12.250\nNone\nI HAVE GREAT THINGS TO TELL YOU SENOR SAID DON...\nI have great things to tell you, Senor, sadona...\n1.861111\n0.676768\n0.712682\n0.287318\n\n\n558\n15.720\nNone\nIT WAS STRANGE TOO THAT HE FOUND AN ARID PLEAS...\nIt was strange, too, that he found an arid ple...\n2.317073\n0.698529\n0.698529\n0.301471\n\n\n770\n13.960\nNone\nWHAT WORLD WIDE INIQUITY SUCH A SPEECH AS THAT...\nWhat worldwide iniquity such a speech as that ...\n2.375000\n0.719697\n0.738740\n0.261260\n\n\n444\n12.475\nNone\nTHEY DREW THEIR SWORDS HID THEIR FACES IN THE ...\nThey drew their swords, hid their faces in the...\n4.200000\n0.807692\n0.807692\n0.192308\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\ntest_model(\"vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-eqvad.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:38<00:00]\n \n \n\n\nWER: 7.47%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n673\n12.130\nNone\nTHE PRINCESS CERTAINLY WAS BEAUTIFUL AND HE WO...\nThe princess certainly was beautiful, and he 
w...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n674\n2.295\nNone\nHE ONLY SHOOK HIS HEAD\nHe only shook his head.\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n355\n2.885\nNone\nI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ\nI'm afraid I don't know much about the land of...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n353\n5.870\nNone\nTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...\nThe first lot we tested on our glass cat, whic...\n0.000000\n0.000000\n0.000000\n1.000000\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Stettelos.\n1.000000\n1.000000\n1.000000\n0.000000\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHonsters, nod.\n1.000000\n1.000000\n1.000000\n0.000000\n\n\n146\n3.260\nNone\nWHERE THEE AND THY FAMILY ARE KNOWN\nWhere's D and I-F where's D and I-F are known?\n1.428571\n0.714286\n0.836735\n0.163265\n\n\n996\n19.915\nNone\nEDISON HAD INSTALLED HIS HISTORIC FIRST GREAT ...\nEdison had installed his historic first-grade ...\n3.208333\n0.766169\n0.771041\n0.228959\n\n\n\n\n1000 rows × 8 columns\n\n\n\n\n\nax = _8.plot.scatter('secs', 'wer', alpha=.2)\nax.set_ylim(0, 1.5)\n\n\n\n\n\n\n\n\n\nax = _15['secs'].hist()\nax.set_yscale('log')\n\n\n\n\n\n\n\n\n\nplt.plot(_15['secs'], 1/_15['gt_text'].str.split('\\w+').str.len(), '.')\n\n\n\n\n\n\n\n\n\n# the reproducibility got pretty low ;)\nfor i in range(4):\n print(i)\n test_model(f\"test-run-{i}.model\")\n print()\n\n0\nWER: 6.37%\nWER (w/o hallucinations): 6.37%\n\n1\nWER: 10.69%\nWER (w/o hallucinations): 9.89%\n\n2\nWER: 12.34%\nWER (w/o hallucinations): 11.79%\n\n3\nWER: 15.83%\nWER (w/o hallucinations): 15.30%\n\n\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:33<00:00]\n \n \n\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:28<00:00]\n \n \n\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:34<00:00]\n \n \n\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 
01:31<00:00]\n \n \n\n\n\ntest_model(\"test-run-warm1000.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:31<00:00]\n \n \n\n\nWER: 8.81%\nWER (w/o hallucinations): 8.81%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n368\n6.190\nNone\nEVEN SO I HAD JUST RETURNED FROM AN ARDUOUS JO...\nEven so, I had just returned from an arduous j...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n691\n4.985\nNone\nTO GIVE AN IDEA OF THESE CONVERSATIONS I WILL ...\nTo give an idea of these conversations, I will...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n366\n3.615\nNone\nCHAPTER THREE AS MASTER WISHES\nChapter 3 As Master Wishes\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n365\n5.780\nNone\nI WILL SHOW YOU WHAT A GOOD JOB I DID AND SHE ...\nI will show you what a good job I did, and she...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n792\n1.810\nNone\nVENICE\nVINIS.\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n324\n2.700\nNone\nASKED THE VOICE IN SCORNFUL ACCENTS\nAsk the voice in the voice in the voice in the...\n1.500000\n0.750000\n0.875000\n0.125000\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano's dead loss.\n2.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n26\n16.735\nNone\nP S PRAY SIR EXCUSE ME FOR WRITING TO YOU A SE...\nP-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-S-P-...\n2.037037\n0.982143\n0.999339\n0.000661\nFalse\n\n\n106\n2.020\nNone\nSQUEAK SQUEAK\nIn the past, we have a question.\n3.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"test-run-1e.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:34<00:00]\n \n \n\n\nWER: 8.41%\nWER (w/o hallucinations): 
8.05%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n655\n4.895\nNone\nI SHALL BE PUNISHED FOR IT NOW I SUPPOSE BY BE...\nI shall be punished for it now, I suppose, by ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n657\n3.640\nNone\nI AM VERY TIRED OF SWIMMING ABOUT HERE O MOUSE\nI am very tired of swimming about here, oh mouse.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n318\n5.115\nNone\nMOST PEOPLE TALK TOO MUCH SO IT IS A RELIEF TO...\nMost people talk too much, so it is a relief t...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n317\n7.920\nNone\nHE SELECTED A SMALL GOLD BOTTLE WITH A PEPPER ...\nHe selected a small gold bottle with a pepper ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n549\n10.575\nNone\nAT MOST BY AN ALMS GIVEN TO A BEGGAR WHOSE BLE...\nAt most, by an alms given to a beggar whose bl...\n1.000000\n0.500000\n0.500000\n0.500000\nTrue\n\n\n399\n6.365\nNone\nI WAS WELL SATISFIED WITH MY CABIN WHICH WAS L...\nI was well satisfied with my cabin, which was ...\n1.052632\n0.540541\n0.588905\n0.411095\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nSteffinor's Daedalus.\n1.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n659\n4.995\nNone\nWE WON'T TALK ABOUT HER ANY MORE IF YOU'D RATH...\nWe won't talk about her anymore if he'd rather...\n1.866667\n0.700000\n0.760000\n0.240000\nTrue\n\n\n95\n8.800\nNone\nTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...\nthought the fur tree, and believed it all, bec...\n4.619048\n0.829060\n0.837200\n0.162800\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\n# but it got better after some hyperparam tuning\ntest_model(\"vqmodel-4e-6454-hyptuned.model\")\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:30<00:00]\n \n \n\n\nWER: 
7.71%\nWER (w/o hallucinations): 7.71%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n403\n5.370\nNone\nDEPARTING FROM FIVE HUNDRED THOUSAND THROATS T...\nDeparting from 500,000 throats, three cheers b...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n922\n4.400\nNone\nBUT HOW DID SHE MANAGE TO RENDER IT SO FASHION...\nBut how did she manage to render it so fashion...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n629\n3.235\nNone\nTWO HOURS AFTERWARDS A TERRIBLE SHOCK AWOKE ME\nTwo hours afterwards, a terrible shock, awoke me.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n355\n2.885\nNone\nI'M AFRAID I DON'T KNOW MUCH ABOUT THE LAND OF OZ\nI'm afraid I don't know much about the land of...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n353\n5.870\nNone\nTHE FIRST LOT WE TESTED ON OUR GLASS CAT WHICH...\nThe first lot we tested on our glass cat, whic...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n849\n3.560\nNone\nI HAD A NOTION IT WAS YOU MATE AS SAVED ME FRO...\nI'll have a note.\n0.928571\n0.866667\n0.942857\n0.057143\nFalse\n\n\n741\n16.360\nNone\nOF WHAT MISSUS NEVERBEND HAD GONE THROUGH IN P...\nOf what Mrs. 
N N N N N N N N N N N N N N N N N...\n0.936170\n0.936170\n0.992021\n0.007979\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCasa was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Stetelos.\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHon Stur's Night.\n1.333333\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-4e-6454-hyptuned-small.en.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:41<00:00]\n \n \n\n\nWER: 7.38%\nWER (w/o hallucinations): 7.38%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.00000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n350\n10.680\nNone\nAT ONE END STOOD A GREAT FIREPLACE IN WHICH A ...\nAt one end stood a great fireplace, in which a...\n0.00000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n349\n2.130\nNone\nTHE WOMAN SEEMED THOUGHTFUL\nThe woman seemed thoughtful.\n0.00000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n680\n6.450\nNone\nHE DARTED LIKE AN ARROW THROUGH ALL THE HALLS ...\nHe darted like an arrow through all the halls,...\n0.00000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n347\n3.665\nNone\nOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...\nOjo had never eaten such a fine meal in all hi...\n0.00000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHonsters, Nod.\n1.00000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n792\n1.810\nNone\nVENICE\nVINUS.\n1.00000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.00000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStephenos dead 
loss.\n1.50000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n440\n15.770\nNone\nELEVEN O'CLOCK HAD STRUCK IT WAS A FINE CLEAR ...\nAt the time of the day, the morning of the day...\n4.12766\n0.960396\n0.993259\n0.006741\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-4e-hyptuned-16gpu.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:32<00:00]\n \n \n\n\nWER: 6.01%\nWER (w/o hallucinations): 6.01%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n390\n1.975\nNone\nWE DON'T KNOW WHERE IT WILL TAKE US\nWe don't know where it will take us.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n708\n13.020\nNone\nTHE PAIN PRODUCED BY AN ACT OF HASTY AND ANGRY...\nThe pain produced by an act of hasty and angry...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n388\n2.355\nNone\nWE'RE LEAVING ON THE ABRAHAM LINCOLN\nWe're leaving on the Abraham Lincoln.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n387\n2.735\nNone\nA ROUTE SLIGHTLY LESS DIRECT THAT'S ALL\nA route slightly less direct, that's all.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n918\n3.000\nNone\nTHAT IS TRUE BADAUDERIE\nThat is true, bad old gree.\n0.750000\n0.500000\n0.625000\n0.375000\nFalse\n\n\n809\n8.875\nNone\nWHEN THE BLUESKINS SAW GHIP GHISIZZLE THEY RAI...\nThanks for watching!\n0.961538\n0.961538\n0.987179\n0.012821\nFalse\n\n\n643\n12.020\nNone\nALICE TOOK UP THE FAN AND GLOVES AND AS THE HA...\nThank you.\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCosse was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefanos de los\n1.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 
9 columns\n\n\n\n\n\ntest_model(\"vqmodel-4e-hyptuned-32gpu.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:32<00:00]\n \n \n\n\nWER: 5.94%\nWER (w/o hallucinations): 5.94%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n757\n10.030\nNone\nTHEREFORE I FEEL MYSELF QUITE ABLE AS PRESIDEN...\nTherefore, I feel myself quite able, as Presid...\n0.00\n0.000000\n0.000000\n1.000000\nFalse\n\n\n628\n2.550\nNone\nDURING HIS WATCH I SLEPT\nDuring his watch, I slept.\n0.00\n0.000000\n0.000000\n1.000000\nFalse\n\n\n756\n4.735\nNone\nYOU HAVE COME TO US THREATENING US WITH ABSOLU...\nYou have come to us threatening us with absolu...\n0.00\n0.000000\n0.000000\n1.000000\nFalse\n\n\n377\n3.910\nNone\nHE WENT HERE THERE AND EVERYWHERE IN PERFECT C...\nHe went here, there, and everywhere in perfect...\n0.00\n0.000000\n0.000000\n1.000000\nFalse\n\n\n376\n8.340\nNone\nNEVER DID HE OBJECT TO BUCKLING UP HIS SUITCAS...\nNever did he object to buckling up his suitcas...\n0.00\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n918\n3.000\nNone\nTHAT IS TRUE BADAUDERIE\nThat is true bad-delt gree.\n0.75\n0.500000\n0.625000\n0.375000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.00\n0.666667\n0.833333\n0.166667\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHonsters Nied.\n1.00\n1.000000\n1.000000\n0.000000\nFalse\n\n\n819\n5.775\nNone\nSCUSE ME SAID TROT I NEGLECTED TO TELL YOU THA...\nThanks for watching.\n1.00\n1.000000\n1.000000\n0.000000\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.50\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-512c-4e-hyptuned-32gpu.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 08:52<00:00]\n \n \n\n\nWER: 7.37%\nWER (w/o hallucinations): 
7.37%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n715\n11.340\nNone\nTHE MOTHER IN MANAGING THE CASE IN THIS WAY RE...\nThe mother, in managing the case in this way, ...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n347\n3.665\nNone\nOJO HAD NEVER EATEN SUCH A FINE MEAL IN ALL HI...\nOjo had never eaten such a fine meal in all hi...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n860\n10.555\nNone\nIT IS NOTABLE THAT THE INDIAN TRIBES HAVE GENE...\nIt is notable that the Indian tribes have gene...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n608\n3.070\nNone\nTHE HORIZON SEEMS EXTREMELY DISTANT\nThe horizon seems extremely distant.\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n344\n4.275\nNone\nI AM MY DEAR AND ALL STRANGERS ARE WELCOME TO ...\nI am, my dear, and all strangers are welcome t...\n0.0\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCosay was my man's servant.\n1.0\n0.666667\n0.833333\n0.166667\nFalse\n\n\n260\n3.155\nNone\nWHO TAUGHT YOU TO SCRUB A FLOOR I SHOULD LIKE ...\n.\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHonster's Night.\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n792\n1.810\nNone\nVENICE\nVenus.\n1.0\n1.000000\n1.000000\n0.000000\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefanos de los.\n1.5\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-512c-dim64-4e-hyptuned-32gpu.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:29<00:00]\n \n \n\n\nWER: 7.13%\nWER (w/o hallucinations): 7.13%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n680\n6.450\nNone\nHE DARTED LIKE AN ARROW THROUGH ALL THE 
HALLS ...\nHe darted like an arrow through all the halls,...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n682\n5.145\nNone\nAND ALL HIS BROTHERS AND SISTERS STOOD ROUND A...\nand all his brothers and sisters stood round a...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n684\n2.165\nNone\nANDERS FACE GREW RED\nAnders face grew red.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n685\n2.775\nNone\nBUT HIS MOTHER HUGGED HIM CLOSE\nBut his mother hugged him close.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n106\n2.020\nNone\nSQUEAK SQUEAK\nSpeak, speak.\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n371\n2.440\nNone\nCONSEIL WAS MY MANSERVANT\nCossay was my man's servant.\n1.000000\n0.666667\n0.833333\n0.166667\nFalse\n\n\n592\n1.805\nNone\nHANS STIRS NOT\nHonsters, Nied.\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n336\n4.835\nNone\nFOR A LONG TIME HE HAD WISHED TO EXPLORE THE B...\nFor a long time, you can see that the video is...\n1.333333\n0.800000\n0.933333\n0.066667\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Staedt-Loss\n1.500000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-base-en+pl-512c-dim64.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 01:34<00:00]\n \n \n\n\nWER: 8.45%\nWER (w/o hallucinations): 8.45%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, while on her lap...\n0.0\n0.0\n0.0\n1.0\nFalse\n\n\n740\n2.715\nNone\nBUT I MEAN TO HAVE MY INNINGS BEFORE LONG\nBut I mean to have my innings before long.\n0.0\n0.0\n0.0\n1.0\nFalse\n\n\n387\n2.735\nNone\nA ROUTE SLIGHTLY LESS DIRECT THAT'S ALL\nA route slightly less direct, that's all.\n0.0\n0.0\n0.0\n1.0\nFalse\n\n\n386\n5.915\nNone\nYES WE ARE CERTAINLY I REPLIED EVASIVELY BUT A...\nYes, we are, certainly, 
I replied evasively, b...\n0.0\n0.0\n0.0\n1.0\nFalse\n\n\n744\n3.630\nNone\nWHAT COULD I DO NOW BUT JUST LAY MYSELF DOWN A...\nWhat could I do now, but just lay myself down ...\n0.0\n0.0\n0.0\n1.0\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n114\n6.560\nNone\nYES IN REALITY THOSE WERE HAPPY TIMES\nThank you.\n1.0\n1.0\n1.0\n0.0\nFalse\n\n\n191\n5.580\nNone\nWHY IT'S IN MISSOURI SOMEWHERE ON THE FRONTIER...\nThank you.\n1.0\n1.0\n1.0\n0.0\nFalse\n\n\n538\n2.215\nNone\nSTEPHANOS DEDALOS\nStefano Stedilos\n1.0\n1.0\n1.0\n0.0\nFalse\n\n\n16\n1.695\nNone\nFAREWELL MADAM\nFair Well, Madame.\n1.5\n1.0\n1.0\n0.0\nFalse\n\n\n106\n2.020\nNone\nSQUEAK SQUEAK\nS'quik, s'quik !\n2.0\n1.0\n1.0\n0.0\nFalse\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ntest_model(\"vqmodel-medium-en+pl-512c-dim64.model\", N=1000)\n\n\n\n\n\n\n \n \n 100.00% [1000/1000 06:09<00:00]\n \n \n\n\nWER: 7.34%\nWER (w/o hallucinations): 6.62%\n\n\n\n\n\n\n\n\n\n\nsecs\nidx\ngt_text\ntext\nwer\nmer\nwil\nwip\nhallucination\n\n\n\n\n0\n8.230\nNone\nAND OFTEN HAS MY MOTHER SAID WHILE ON HER LAP ...\nAnd often has my mother said, While on her lap...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n571\n6.615\nNone\nSTEPHEN'S HEART BEGAN SLOWLY TO FOLD AND FADE ...\nStephen's heart began slowly to fold and fade ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n572\n4.090\nNone\nHE IS CALLED AS YOU KNOW THE APOSTLE OF THE IN...\nHe is called, as you know, the Apostle of the ...\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n573\n3.330\nNone\nA GREAT SAINT SAINT FRANCIS XAVIER\nA great saint, St. 
Francis Xavier.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n575\n3.445\nNone\nHE HAD THE FAITH IN HIM THAT MOVES MOUNTAINS\nHe had the faith in him that moves mountains.\n0.000000\n0.000000\n0.000000\n1.000000\nFalse\n\n\n...\n...\n...\n...\n...\n...\n...\n...\n...\n...\n\n\n76\n4.110\nNone\nREJOICE IN THY OWN FRESH YOUTH\nRead more at www.BritishMedia.com\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n63\n13.950\nNone\nTO GROW AND GROW TO GET OLDER AND BE TALL THOU...\n. . . . . . . . . . . . . . . . . . . . . . . ...\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n819\n5.775\nNone\nSCUSE ME SAID TROT I NEGLECTED TO TELL YOU THA...\nThanks for watching!\n1.000000\n1.000000\n1.000000\n0.000000\nFalse\n\n\n95\n8.800\nNone\nTHOUGHT THE FIR TREE AND BELIEVED IT ALL BECAU...\nthought the fur tree, and believed it all, bec...\n2.047619\n0.682540\n0.697657\n0.302343\nFalse\n\n\n654\n6.200\nNone\nI WISH I HADN'T CRIED SO MUCH SAID ALICE AS SH...\n\"'I wish I hadn't cried so much,' said Alice, ...\n6.900000\n0.873418\n0.873418\n0.126582\nTrue\n\n\n\n\n1000 rows × 9 columns\n\n\n\n\n\ndef show_stat(stats, i):\n row = stats.loc[i]\n print('WER: ', row['wer'])\n print('GT: ', row['gt_text'])\n print('GEN: ', row['text'])\n\n\nshow_stat(_18, 654)\n\nWER: 6.9\nGT: I WISH I HADN'T CRIED SO MUCH SAID ALICE AS SHE SWAM ABOUT TRYING TO FIND HER WAY OUT\nGEN: \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. 
\"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her way out. \"'I wish I hadn't cried so much,' said Alice, as she swam about, trying to find her\n\n\n\nshow_stat(_18, 819)\n\nWER: 1.0\nGT: SCUSE ME SAID TROT I NEGLECTED TO TELL YOU THAT YOU'RE NOT THE BOOLOOROO ANY MORE\nGEN: Thanks for watching!\n\n\n\nshow_stat(_13, 114)\n\nWER: 1.0\nGT: YES IN REALITY THOSE WERE HAPPY TIMES\nGEN: Thank you." + }, + { + "objectID": "4b. multi-language semantic to acoustic token modeling.html", + "href": "4b. multi-language semantic to acoustic token modeling.html", + "title": "Semantic to acoustic token modeling", + "section": "", + "text": "from encodec.model import EncodecModel\nimport webdataset as wds\nfrom whisperspeech.train import *\n\nimport pylab as plt\nfrom IPython.display import Audio, HTML, display" + }, + { + "objectID": "4b. multi-language semantic to acoustic token modeling.html#model", + "href": "4b. multi-language semantic to acoustic token modeling.html#model", + "title": "Semantic to acoustic token modeling", + "section": "Model", + "text": "Model\n\nimport pylab as plt\nimport fastprogress\nimport IPython\nimport numpy as np\n\nclass CMLMVisual:\n \"\"\"Visualize training progress\"\"\"\n def __init__ (self, model, masterbar, total_steps):\n self.model = model\n self.masterbar = masterbar\n self.total_steps = total_steps\n self.epochs = total_steps // masterbar.main_bar.total\n \n gs = plt.GridSpec(3, 1, height_ratios=[2,2,1])\n graph_fig = plt.figure(figsize=(10,6))\n self.graph_fig = graph_fig\n self.loss_p = graph_fig.add_subplot(gs[0])\n self.acc_p = graph_fig.add_subplot(gs[1], sharex=self.loss_p)\n self.acc_p.tick_params('x', labelbottom=False)\n self.lr_p = graph_fig.add_subplot(gs[2], sharex=self.loss_p)\n self.lr_p.tick_params('x', labelbottom=False)\n self.graph_out = None\n \n self.its = []\n self.train_losses = []\n self.val_losses = []\n self.lr_history = []\n self.acc = np.nan\n self.acc_history = []\n 
self.pacc_history = []\n \n def show(self):\n self.start_t = time.time()\n self.masterbar.write([\"samples\", \"train\", \"val\", \"time\"], table=True)\n self.graph_out = display(self.graph_fig, display_id=True)\n self.acc_out = display(IPython.display.HTML(''), display_id=True)\n \n def hide(self):\n if self.graph_out is not None:\n self.graph_out.update(IPython.display.HTML(''))\n \n def plot(self):\n loss_p, acc_p, lr_p = self.loss_p, self.acc_p, self.lr_p\n loss_p.clear()\n loss_p.plot(self.its, self.train_losses)\n loss_p.plot(self.its, self.val_losses)\n loss_p.set_xlim(0, self.total_steps)\n loss_p.set_yscale('log')\n acc_p.clear()\n for k in self.acc_history[-1].keys():\n acc_p.plot(self.its, [x[k] for x in self.acc_history], ':')\n lr_p.clear()\n lrs = np.array(self.lr_history)\n lr_p.plot(self.its, lrs)\n self.graph_out.update(self.graph_fig)\n \n def add_data(self, it, lr, train_loss, val_los):\n self.its.append(it)\n self.train_losses.append(train_loss)\n self.val_losses.append(val_los)\n self.lr_history.append(lr)\n metrics = self.model.get_metrics()\n self.acc_history.append(metrics)\n html = \"<h5>Accuracies:</h5><table>\"\n html += \"<thead>\"+(''.join([f\"<td>{k}<td>\" for k,x in metrics.items()]))+\"</thead>\"\n html += \"<tr>\"+(''.join([f\"<td>{x*100:.1f}%<td>\" for k,x in metrics.items()]))+\"</tr>\"\n html += \"</table>\"\n self.acc_out.update(IPython.display.HTML(html))\n self.plot()\n\n def add_table_row(self, it, avg_train_loss, val_loss):\n elapsed_t = time.time() - self.start_t\n self.masterbar.write([it, f\"{avg_train_loss:.5f}\", f\"{val_loss:.5f}\", fastprogress.core.format_time(elapsed_t)], table=True)\n \n def on_iter(self, bar, it, avg_train_loss, val_loss):\n epoch = math.ceil(it / self.total_steps * self.epochs)\n bar.comment = f\"#{epoch}/{self.epochs} loss: {avg_train_loss:.3f} / {val_loss:.3f}\"\n\n\nsource\n\nDelSumEmbedding\n\n DelSumEmbedding (n_head=6, head_width=64, atoks_width=None, length=2250,\n codes=1024, 
quantizers=8, pos_embs=None)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nSADelARTransformer\n\n SADelARTransformer (depth=3, ctx_n=2250, stoks_len=750, stoks_codes=4097,\n stoks_width=None, spk_width=None, atoks_width=None,\n n_head=3, head_width=64, ffn_mult=4, quantizers=8,\n speaker_map={'1': 0}, tunables=Tunables(init_std=9,\n embeddings_std=0.2, embeddings_lr_scale=10,\n output_mult=5.6, query_mult=0.3,\n encoder_depth_ratio=0.25, linear_heads=False,\n rope=True, q0_loss_mult=1, causal_encoder=False,\n lr0=0.003, clip_gradient_norm=2, weight_decay=0.001,\n warmup_steps=2000, random=False,\n random_finetune=False, force_hidden_to_emb=False))\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. 
You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nTunables\n\n Tunables (init_std:float=9, embeddings_std:float=0.2,\n embeddings_lr_scale:float=10, output_mult:float=5.6,\n query_mult:float=0.3, encoder_depth_ratio:float=0.25,\n linear_heads:bool=False, rope:bool=True, q0_loss_mult:float=1,\n causal_encoder:bool=False, lr0:float=0.003,\n clip_gradient_norm:float=2, weight_decay:float=0.001,\n warmup_steps:float=2000, random:bool=False,\n random_finetune:bool=False, force_hidden_to_emb:bool=False)\n\n\nsource\n\n\nrand\n\n rand (start, end)\n\n\nsource\n\n\nDelSumHead\n\n DelSumHead (quantizers=8, n_head=6, head_width=64)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. 
note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool" + }, + { + "objectID": "4b. multi-language semantic to acoustic token modeling.html#training-test", + "href": "4b. multi-language semantic to acoustic token modeling.html#training-test", + "title": "Semantic to acoustic token modeling", + "section": "Training test", + "text": "Training test\n\ntrain_ds = load_dataset('../librilight/*atoks*.tar.gz', '../librilight-vq-en+pl/', 100000, vq_codes=513, exclude_files='../librilight/common-speakers-maxvad')\nval_ds = load_dataset('../librilight/common-speakers-maxvad.tar.gz', '../librilight-vq-en+pl/', 512, vq_codes=513, validation=True)\n\n\nmodel = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',\n tunables=Tunables()).cuda()\ntrain(f\"s2a-new\", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,\n table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)\n\n\n\n\nAccuracies:acc_0acc_1acc_2acc_329.6%23.6%21.2%19.2%\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 09:39<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ntime\n\n\n\n\n25024\n3.95886\n4.17079\n02:34\n\n\n50016\n3.71909\n3.81947\n04:56\n\n\n75008\n3.53838\n3.62924\n07:18\n\n\n100000\n3.34118\n3.46100\n09:39\n\n\n\n\n\n \n \n 100.00% [3125/3125 09:39<00:00 #100000/100000 loss: 3.341 / 3.461]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. 
Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.\n warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)\n\n\n\n\n\n\n\n\n\n\n# encoder loss barely helps, probably because the RoPE cross-attention bias is already helping a lot\nmodel = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',\n tunables=Tunables(causal_encoder=True)).cuda()\ntrain(f\"s2a-new\", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,\n table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)\n\n\n\n\nAccuracies:acc_0acc_1acc_2acc_329.6%23.8%21.2%19.2%\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 09:41<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ntime\n\n\n\n\n25024\n4.16333\n4.16063\n02:32\n\n\n50016\n3.98411\n3.79632\n04:55\n\n\n75008\n3.75278\n3.62357\n07:18\n\n\n100000\n3.54639\n3.45734\n09:41\n\n\n\n\n\n \n \n 100.00% [3125/3125 09:41<00:00 #100000/100000 loss: 3.546 / 3.457]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. 
Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.\n warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)\n\n\n\n\n\n\n\n\n\n\n# we can prioritize the loss for the first quantizer\n# we'd have to compare generations to really know if it helps though\nmodel = make_model('micro', quantizers=4, frozen_embeddings_model='vqmodel-medium-en+pl-512c-dim64.model',\n tunables=Tunables(q0_loss_mult=5)).cuda()\ntrain(f\"s2a-new\", model, train_ds, val_ds, half=True, bs=32, lr=model.tunables.lr0, epochs=1, warmup_steps=model.tunables.warmup_steps,\n table_row_every_iters=25000, run_valid_every_iters=5000, visual_class=CMLMVisual)\n\n\n\n\nAccuracies:acc_0acc_1acc_2acc_330.5%23.0%19.8%17.7%\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 09:39<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ntime\n\n\n\n\n25024\n3.59923\n4.24838\n02:32\n\n\n50016\n3.41711\n3.88030\n04:55\n\n\n75008\n3.19359\n3.70881\n07:17\n\n\n100000\n3.04986\n3.53762\n09:39\n\n\n\n\n\n \n \n 100.00% [3125/3125 09:39<00:00 #100000/100000 loss: 3.050 / 3.538]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/optim/lr_scheduler.py:149: UserWarning: The epoch parameter in `scheduler.step()` was not necessary and is being deprecated where possible. Please use `scheduler.step()` to step the scheduler. During the deprecation, if epoch is different from None, the closed form is used instead of the new chainable form, where available. Please open an issue if you are unable to replicate your use case: https://github.com/pytorch/pytorch/issues/new/choose.\n warnings.warn(EPOCH_DEPRECATION_WARNING, UserWarning)" + }, + { + "objectID": "2A. Speaker Embeddings.html", + "href": "2A. Speaker Embeddings.html", + "title": "WhisperSpeech", + "section": "", + "text": "Doing transcription means sampling from the Whisper auto-regressive decoder. This is too slow to do for each training batch. 
Fortunately the transcriptions are small text snippets so we can precompute them once for the whole dataset.\nWe use segments from Voice Activity Detection to reduce any boundary issues, then we use webdataset to yield multiple chunks from a FLAC file we only load once. The VAD segments are merged into longer chunks to make Whisper processing more efficient (it always processes 30s at a time)\nUsage:\npython -m whisperspeech.extract_spk_emb librilight-large-wo6454-flac-000002.tar\nYou can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt." + }, + { + "objectID": "2A. Speaker Embeddings.html#precompute-whisper-transcriptions-for-vq-bottleneck-distilation", + "href": "2A. Speaker Embeddings.html#precompute-whisper-transcriptions-for-vq-bottleneck-distilation", + "title": "WhisperSpeech", + "section": "", + "text": "Doing transcription means sampling from the Whisper auto-regressive decoder. This is too slow to do for each training batch. Fortunately the transcriptions are small text snippets so we can precompute them once for the whole dataset.\nWe use segments from Voice Activity Detection to reduce any boundary issues, then we use webdataset to yield multiple chunks from a FLAC file we only load once. The VAD segments are merged into longer chunks to make Whisper processing more efficient (it always processes 30s at a time)\nUsage:\npython -m whisperspeech.extract_spk_emb librilight-large-wo6454-flac-000002.tar\nYou can pass in either a URL or a local file name. Either way it will expect a vad file in the local directory. The result will go into a file in the current directory named after the source file but replacing flac with txt." + }, + { + "objectID": "2A. Speaker Embeddings.html#batch-processing", + "href": "2A. 
Speaker Embeddings.html#batch-processing", + "title": "WhisperSpeech", + "section": "Batch processing", + "text": "Batch processing\nLet’s put everything above together.\n\ndl = chunked_dataset('../cc-small/cc-mix-000000.tar', 'mix')\nfor keys, samples, seconds in dl: break\nkeys, samples, seconds\n\n(['cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_023',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_037',\n 'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_009',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_010',\n 'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_004',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_049',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_000',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_024',\n 'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? ?_033',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_034',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_012',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_052',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_007',\n 'cc/7 Sec Riddles/[-hIfETsPxPg] New TYPE Of Riddles: Can You Ace Our New Game? 
?_030',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_055',\n 'cc/Bon Appetit/[jmntzm5yBYY] Melissa Makes Chicken Afritada | From the Home Kitchen | Bon Appétit_006'],\n tensor([[-0.0154, -0.0289, -0.0376, ..., 0.0000, 0.0000, 0.0000],\n [-0.0035, -0.0058, -0.0082, ..., 0.0000, 0.0000, 0.0000],\n [-0.0082, -0.0150, -0.0179, ..., 0.0000, 0.0000, 0.0000],\n ...,\n [-0.0018, 0.0017, 0.0005, ..., 0.0000, 0.0000, 0.0000],\n [ 0.0014, 0.0021, 0.0019, ..., 0.0000, 0.0000, 0.0000],\n [ 0.0055, 0.0106, 0.0086, ..., 0.0000, 0.0000, 0.0000]]),\n tensor([ 5.1536, 8.5666, 2.2867, 22.5939, 1.7406, 22.4744, 2.2355, 2.3549,\n 2.0307, 18.0717, 6.4505, 2.1843, 1.6382, 5.5461, 2.6450, 29.1297]))\n\n\n\nclassifier = EncoderClassifier.from_hparams(\"speechbrain/spkrec-ecapa-voxceleb\",\n savedir=os.path.expanduser(\"~/.cache/speechbrain/\"),\n run_opts={\"device\": \"cuda\"})\n\n\nembs = F.normalize(classifier.encode_batch(samples, wav_lens=seconds/30).squeeze(1), dim=-1)\n\n\nembs @ embs.T\n\ntensor([[ 1.0000e+00, -1.5016e-01, -1.0663e-01, 7.4717e-01, 6.6663e-01,\n 6.7088e-01, 7.3192e-01, 8.0751e-01, -1.4667e-01, -1.5538e-01,\n 7.8594e-01, -1.7131e-01, 4.7389e-02, 3.8923e-01, 8.0528e-02,\n 6.8884e-02],\n [-1.5016e-01, 1.0000e+00, 7.4450e-01, -7.8480e-02, -9.2287e-02,\n -4.8926e-02, -1.8269e-01, -1.2868e-01, 6.2437e-01, 7.6687e-01,\n -1.2109e-01, 5.7231e-01, 5.4483e-02, -1.3711e-02, 5.4225e-02,\n 1.0608e-01],\n [-1.0663e-01, 7.4450e-01, 1.0000e+00, -9.6316e-02, -8.8784e-02,\n -3.4282e-02, -1.6323e-01, -1.2561e-01, 5.7611e-01, 7.6471e-01,\n -1.1900e-01, 5.1093e-01, 3.6564e-02, 2.1784e-03, 4.5240e-02,\n 8.6847e-02],\n [ 7.4717e-01, -7.8480e-02, -9.6316e-02, 1.0000e+00, 6.9144e-01,\n 7.3513e-01, 7.2880e-01, 7.7707e-01, -8.8781e-02, -8.2090e-02,\n 7.7152e-01, -5.2820e-02, 7.3040e-02, 3.4047e-01, 9.3617e-02,\n 1.1111e-01],\n [ 6.6663e-01, -9.2287e-02, -8.8784e-02, 6.9144e-01, 1.0000e+00,\n 7.1832e-01, 6.3586e-01, 7.3829e-01, 
-7.3225e-02, -1.2791e-01,\n 7.3249e-01, -4.8655e-04, -2.3932e-02, 3.5492e-01, 5.7829e-02,\n 1.2558e-01],\n [ 6.7088e-01, -4.8926e-02, -3.4282e-02, 7.3513e-01, 7.1832e-01,\n 1.0000e+00, 6.7989e-01, 7.1707e-01, -1.1102e-01, -3.2756e-02,\n 7.0298e-01, 9.7910e-04, 3.2516e-02, 3.2002e-01, 9.5534e-02,\n 1.2125e-01],\n [ 7.3192e-01, -1.8269e-01, -1.6323e-01, 7.2880e-01, 6.3586e-01,\n 6.7989e-01, 1.0000e+00, 7.4862e-01, -1.4716e-01, -1.8850e-01,\n 7.7709e-01, -1.4848e-01, 3.5645e-02, 3.9155e-01, 8.5304e-02,\n 7.6598e-02],\n [ 8.0751e-01, -1.2868e-01, -1.2561e-01, 7.7707e-01, 7.3829e-01,\n 7.1707e-01, 7.4862e-01, 1.0000e+00, -1.3192e-01, -9.4631e-02,\n 8.1980e-01, -1.0383e-01, -1.2569e-02, 4.0366e-01, 1.3611e-02,\n 7.2108e-02],\n [-1.4667e-01, 6.2437e-01, 5.7611e-01, -8.8781e-02, -7.3225e-02,\n -1.1102e-01, -1.4716e-01, -1.3192e-01, 1.0000e+00, 6.1238e-01,\n -7.4339e-02, 4.6340e-01, -3.2115e-02, 1.9445e-02, -2.3383e-03,\n -5.2721e-04],\n [-1.5538e-01, 7.6687e-01, 7.6471e-01, -8.2090e-02, -1.2791e-01,\n -3.2756e-02, -1.8850e-01, -9.4631e-02, 6.1238e-01, 1.0000e+00,\n -1.2142e-01, 5.6736e-01, 3.0472e-02, -2.1869e-02, 3.7176e-02,\n 1.1145e-01],\n [ 7.8594e-01, -1.2109e-01, -1.1900e-01, 7.7152e-01, 7.3249e-01,\n 7.0298e-01, 7.7709e-01, 8.1980e-01, -7.4339e-02, -1.2142e-01,\n 1.0000e+00, -4.7116e-02, 3.0283e-02, 3.6122e-01, 3.7660e-02,\n 1.3460e-01],\n [-1.7131e-01, 5.7231e-01, 5.1093e-01, -5.2820e-02, -4.8655e-04,\n 9.7910e-04, -1.4848e-01, -1.0383e-01, 4.6340e-01, 5.6736e-01,\n -4.7116e-02, 1.0000e+00, 1.2047e-01, 1.8673e-02, 1.4013e-01,\n 1.9592e-01],\n [ 4.7389e-02, 5.4483e-02, 3.6564e-02, 7.3040e-02, -2.3932e-02,\n 3.2516e-02, 3.5645e-02, -1.2569e-02, -3.2115e-02, 3.0472e-02,\n 3.0283e-02, 1.2047e-01, 1.0000e+00, -2.5141e-02, 8.7659e-01,\n 6.1994e-01],\n [ 3.8923e-01, -1.3711e-02, 2.1784e-03, 3.4047e-01, 3.5492e-01,\n 3.2002e-01, 3.9155e-01, 4.0366e-01, 1.9445e-02, -2.1869e-02,\n 3.6122e-01, 1.8673e-02, -2.5141e-02, 1.0000e+00, 2.9265e-04,\n 2.0769e-02],\n [ 8.0528e-02, 
5.4225e-02, 4.5240e-02, 9.3617e-02, 5.7829e-02,\n 9.5534e-02, 8.5304e-02, 1.3611e-02, -2.3383e-03, 3.7176e-02,\n 3.7660e-02, 1.4013e-01, 8.7659e-01, 2.9265e-04, 1.0000e+00,\n 6.3008e-01],\n [ 6.8884e-02, 1.0608e-01, 8.6847e-02, 1.1111e-01, 1.2558e-01,\n 1.2125e-01, 7.6598e-02, 7.2108e-02, -5.2721e-04, 1.1145e-01,\n 1.3460e-01, 1.9592e-01, 6.1994e-01, 2.0769e-02, 6.3008e-01,\n 1.0000e+00]], device='cuda:0')\n\n\n\nseconds\n\ntensor([ 4.9147, 14.5051, 8.8225, 9.8293, 4.2150, 3.1399, 5.1536, 5.5290,\n 4.9317, 12.8499, 7.5085, 2.3379, 17.1672, 1.2287, 29.0785, 3.2935])\n\n\n\n((embs.unsqueeze(1) - embs.unsqueeze(0))**2).sum(-1)\n\ntensor([[0.0000, 2.3003, 2.2133, 0.5057, 0.6667, 0.6582, 0.5362, 0.3850, 2.2933,\n 2.3108, 0.4281, 2.3426, 1.9052, 1.2215, 1.8389, 1.8622],\n [2.3003, 0.0000, 0.5110, 2.1570, 2.1846, 2.0979, 2.3654, 2.2574, 0.7513,\n 0.4663, 2.2422, 0.8554, 1.8910, 2.0274, 1.8916, 1.7878],\n [2.2133, 0.5110, 0.0000, 2.1926, 2.1776, 2.0686, 2.3265, 2.2512, 0.8478,\n 0.4706, 2.2380, 0.9781, 1.9269, 1.9956, 1.9095, 1.8263],\n [0.5057, 2.1570, 2.1926, 0.0000, 0.6171, 0.5297, 0.5424, 0.4459, 2.1776,\n 2.1642, 0.4570, 2.1056, 1.8539, 1.3191, 1.8128, 1.7778],\n [0.6667, 2.1846, 2.1776, 0.6171, 0.0000, 0.5634, 0.7283, 0.5234, 2.1465,\n 2.2558, 0.5350, 2.0010, 2.0479, 1.2902, 1.8843, 1.7488],\n [0.6582, 2.0979, 2.0686, 0.5297, 0.5634, 0.0000, 0.6402, 0.5659, 2.2220,\n 2.0655, 0.5940, 1.9980, 1.9350, 1.3600, 1.8089, 1.7575],\n [0.5362, 2.3654, 2.3265, 0.5424, 0.7283, 0.6402, 0.0000, 0.5028, 2.2943,\n 2.3770, 0.4458, 2.2970, 1.9287, 1.2169, 1.8294, 1.8468],\n [0.3850, 2.2574, 2.2512, 0.4459, 0.5234, 0.5659, 0.5028, 0.0000, 2.2638,\n 2.1893, 0.3604, 2.2077, 2.0251, 1.1927, 1.9728, 1.8558],\n [2.2933, 0.7513, 0.8478, 2.1776, 2.1465, 2.2220, 2.2943, 2.2638, 0.0000,\n 0.7752, 2.1487, 1.0732, 2.0642, 1.9611, 2.0047, 2.0011],\n [2.3108, 0.4663, 0.4706, 2.1642, 2.2558, 2.0655, 2.3770, 2.1893, 0.7752,\n 0.0000, 2.2428, 0.8653, 1.9391, 2.0437, 1.9256, 1.7771],\n [0.4281, 2.2422, 
2.2380, 0.4570, 0.5350, 0.5940, 0.4458, 0.3604, 2.1487,\n 2.2428, 0.0000, 2.0942, 1.9394, 1.2776, 1.9247, 1.7308],\n [2.3426, 0.8554, 0.9781, 2.1056, 2.0010, 1.9980, 2.2970, 2.2077, 1.0732,\n 0.8653, 2.0942, 0.0000, 1.7591, 1.9627, 1.7197, 1.6082],\n [1.9052, 1.8910, 1.9269, 1.8539, 2.0479, 1.9350, 1.9287, 2.0251, 2.0642,\n 1.9391, 1.9394, 1.7591, 0.0000, 2.0503, 0.2468, 0.7601],\n [1.2215, 2.0274, 1.9956, 1.3191, 1.2902, 1.3600, 1.2169, 1.1927, 1.9611,\n 2.0437, 1.2776, 1.9627, 2.0503, 0.0000, 1.9994, 1.9585],\n [1.8389, 1.8916, 1.9095, 1.8128, 1.8843, 1.8089, 1.8294, 1.9728, 2.0047,\n 1.9256, 1.9247, 1.7197, 0.2468, 1.9994, 0.0000, 0.7398],\n [1.8622, 1.7878, 1.8263, 1.7778, 1.7488, 1.7575, 1.8468, 1.8558, 2.0011,\n 1.7771, 1.7308, 1.6082, 0.7601, 1.9585, 0.7398, 0.0000]],\n device='cuda:0')\n\n\n\nplt.imshow(((embs.unsqueeze(1) - embs.unsqueeze(0))**2).sum(-1).cpu())" + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html", + "href": "2b. whisper quantization (semantic token) model.html", + "title": "Distill Whisper with a VQ bottleneck", + "section": "", + "text": "from whisperspeech import wh_transcribe\nimport IPython\n\n/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/pyannote/audio/core/io.py:43: UserWarning: torchaudio._backend.set_audio_backend has been deprecated. With dispatcher enabled, this function is no-op. You can remove the function call.\n torchaudio.set_audio_backend(\"soundfile\")\ntorchvision is not available - cannot save figures" + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#prepare-the-dataset", + "href": "2b. 
whisper quantization (semantic token) model.html#prepare-the-dataset", + "title": "Distill Whisper with a VQ bottleneck", + "section": "Prepare the dataset", + "text": "Prepare the dataset\n\nshards = [str(x) for x in Path('/data/whisperspeech-wds/').glob('librilight-*.tar')]\n\n\nds = wds.WebDataset(shards, shardshuffle=True)\n\n\nds2 = ds.compose(\n wds.decode(wds.torch_audio),\n utils.find_audio,\n merge_in(derived_dataset('/data/whisperspeech-processed-wds/', 'vad')),\n wds.map_dict(**{\"vad.npy\":wh_transcribe.chunk_merger}),\n wh_transcribe.split_to_chunks,\n merge_in(derived_dataset('/data/whisperspeech-processed-wds/', 'base.en-txt')),\n wds.shuffle(),\n wds.select(lambda x: x['i'] != 0 and x['i'] != x['imax']),\n)\n\n\nvad_shards = [str(x) for x in Path('/data/whisperspeech-processed-wds/').glob('*-large-6454-vad-*.tar.gz')]\n\n\nds = wds.WebDataset(vad_shards).decode().map_dict(**{'vad.npy':wh_transcribe.chunk_merger})\n\n\nchunks = [len(x['vad.npy'][1:-1]) for x in progress_bar(ds, total='noinfer')]\n\n\n\n\n\n\n \n \n 100.00% [3411/3411 00:01<00:00]\n \n \n\n\n\nsum(chunks)\n\n203078\n\n\n\nfor x in progress_bar(ds2, total=5):\n IPython.display.display(IPython.display.Markdown(f\"## {x['__key__']} from {x['__url__']}\\n{x['txt']}\"))\n IPython.display.display(IPython.display.Audio(x['samples'], rate=16000))\n\n\n\n\n\n\n \n \n 100.00% [5/5 00:01<00:00]\n \n \n\n\nlarge/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_006 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nPhysically I was incapable of complying with the command, and mentally I had not the slightest intention of departing. 
In an outhouse devoted to storing melees, sheepskins, and harness, an old man was sitting on the doorstep, compounding a mixture which I recognized as a sheep remedy.\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nlarge/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_009 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nThe following day I was the most surprised man in South Africa when I learned that my preparation was working a marvelous cure. I was invited to remain with the bore the balance of the season as an honoured guest. Day after day I tramped the hills, returning at night as wise and as rich as when I set out. There were unmistakable indications that gold should be found in the vicinity, but the stubborn fact remained that I could not find it.\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nlarge/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_001 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nI was one of the first prospectors in the Transvaal to search for gold and a precious dance it led me. At that time, but few Englishmen had ventured into the Boer country, and such was the jealousy with which they were regarded that it was impossible to secure any information which would assist in the search. Footsoir and weary, I tramped from farm to farm, content\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nlarge/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_032 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nDead, more than twenty years. In fact, before I was married and came to live here, for he was my husband’s father. Did you know him? Yes, but I was only a little girl at the time. 
Why have the clothes been kept?\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\nlarge/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_004 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nFortunately, I had acquired some knowledge of sheep in Australia else I believe that I should have starved. When all else failed, I became a sheep doctor and then did a compound whose virtues would have done credit to the most widely advertised path and medicine nostrum.\n\n\n\n \n \n Your browser does not support the audio element.\n \n \n\n\n\nds3 = ds2.compose(\n add_masks,\n tokenize_text,\n wds.to_tuple('samples', 'mask', 'in_ttoks', 'out_ttoks')\n)\n\n\nfor x in ds3: break\nx\n\n(tensor([0.0043, 0.0102, 0.0163, ..., 0.0000, 0.0000, 0.0000]),\n tensor([ True, True, True, ..., False, False, False]),\n tensor([50257, 3152, 257, 44823, 3154, 1589, 11, 484, 673, 1144,\n 572, 503, 286, 2837, 290, 706, 2063, 281, 1711, 338,\n 1057, 11, 262, 39535, 21067, 373, 625, 262, 2318, 290,\n 287, 5897, 10150, 13, 1119, 2582, 40424, 510, 262, 27913,\n 4608, 284, 47251, 290, 1043, 257, 1588, 1426, 325, 286,\n 4684, 13384, 3492, 284, 17655, 511, 15892, 13, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 
50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256, 50256,\n 50256]),\n tensor([ 3152, 257, 44823, 3154, 1589, 11, 484, 673, 1144, 572,\n 503, 286, 2837, 290, 706, 2063, 281, 1711, 338, 1057,\n 11, 262, 39535, 21067, 373, 625, 262, 2318, 290, 287,\n 5897, 10150, 13, 1119, 2582, 40424, 510, 262, 27913, 4608,\n 284, 47251, 290, 1043, 257, 1588, 1426, 325, 286, 4684,\n 13384, 3492, 284, 17655, 511, 15892, 13, 50256, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100]))\n\n\n\nds3 = ds2.compose(\n add_masks,\n lambda x: tokenize_text(x, model='medium', language='en'),\n wds.to_tuple('samples', 'mask', 'in_ttoks', 'out_ttoks')\n)\n\n\nfor x in ds3: break\nx\n\n(tensor([0.0013, 0.0010, 0.0011, ..., 0.0000, 0.0000, 0.0000]),\n tensor([ True, True, True, ..., False, False, False]),\n tensor([50258, 50259, 50359, 32, 1326, 1270, 3931, 382, 613, 11,\n 11672, 293, 37632, 13809, 11, 576, 1319, 264, 1851, 295,\n 264, 1002, 11, 293, 1939, 576, 572, 544, 1643, 281,\n 
18071, 264, 1164, 295, 3687, 11, 420, 1497, 554, 1952,\n 6018, 11, 813, 264, 1974, 5010, 295, 721, 11, 689,\n 264, 7700, 366, 4054, 293, 7006, 293, 14154, 292, 13,\n 2188, 1359, 17431, 2212, 281, 3511, 328, 3780, 311, 3567,\n 294, 702, 1536, 6717, 1062, 362, 16424, 796, 666, 257,\n 5403, 14763, 13, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257, 50257,\n 50257, 50257, 50257]),\n tensor([50259, 50359, 32, 1326, 1270, 3931, 382, 613, 11, 11672,\n 293, 37632, 13809, 11, 576, 1319, 264, 1851, 295, 264,\n 1002, 11, 293, 1939, 576, 572, 544, 1643, 281, 18071,\n 264, 1164, 295, 3687, 11, 420, 1497, 554, 1952, 6018,\n 11, 813, 264, 1974, 5010, 295, 721, 11, 689, 264,\n 7700, 366, 4054, 293, 7006, 293, 14154, 292, 13, 2188,\n 1359, 17431, 2212, 281, 3511, 328, 3780, 311, 3567, 294,\n 702, 1536, 6717, 1062, 362, 16424, 796, 666, 257, 5403,\n 14763, 13, 50257, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, 
-100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100, -100, -100, -100, -100, -100, -100, -100,\n -100, -100, -100]))\n\n\n\ntrain_ds = load_dataset('librilight-wds/librilight-small-flac-000000-s0*.tar', 'librilight-preproc-wds/', samples=2500 * 32)\n\n\nval_ds = load_dataset('librilight-wds/librilight-small-flac-000000-s11.tar', 'librilight-preproc-wds/', samples=500)\n\n\nfor x in progress_bar(wds.WebLoader(train_ds, num_workers=16, batch_size=None), total='noinfer'): pass\n\n\n\n\n\n\n \n \n [245/? 00:09<?]\n \n \n\n\n╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮\n│ in <module>:1 │\n│ │\n│ ❱ 1 for x in progress_bar(wds.WebLoader(train_ds, num_workers=16, batch_size=None), total='n │\n│ 2 │\n│ │\n│ /opt/conda/lib/python3.10/site-packages/fastprogress/fastprogress.py:41 in __iter__ │\n│ │\n│ 38 │ def __iter__(self): │\n│ 39 │ │ if self.total != 0: self.update(0) │\n│ 40 │ │ try: │\n│ ❱ 41 │ │ │ for i,o in enumerate(self.gen): │\n│ 42 │ │ │ │ if self.total and i >= self.total: break │\n│ 43 │ │ │ │ yield o │\n│ 44 │ │ │ │ self.update(i+1) │\n│ │\n│ /root/workspace/webdataset/webdataset/pipeline.py:64 in iterator │\n│ │\n│ 61 │ def iterator(self): │\n│ 62 │ │ \"\"\"Create an iterator through the entire dataset, using the given number of repe │\n│ 63 │ │ for i in range(self.repetitions): │\n│ ❱ 64 │ │ │ for sample in self.iterator1(): │\n│ 65 │ │ │ │ yield sample │\n│ 66 │ │\n│ 67 │ def __iter__(self): │\n│ │\n│ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:633 in __next__ │\n│ │\n│ 630 │ │ │ if self._sampler_iter is None: │\n│ 631 │ │ │ │ # TODO(https://github.com/pytorch/pytorch/issues/76750) │\n│ 632 │ │ │ │ 
self._reset() # type: ignore[call-arg] │\n│ ❱ 633 │ │ │ data = self._next_data() │\n│ 634 │ │ │ self._num_yielded += 1 │\n│ 635 │ │ │ if self._dataset_kind == _DatasetKind.Iterable and \\ │\n│ 636 │ │ │ │ │ self._IterableDataset_len_called is not None and \\ │\n│ │\n│ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1328 in _next_data │\n│ │\n│ 1325 │ │ │ │ return self._process_data(data) │\n│ 1326 │ │ │ │\n│ 1327 │ │ │ assert not self._shutdown and self._tasks_outstanding > 0 │\n│ ❱ 1328 │ │ │ idx, data = self._get_data() │\n│ 1329 │ │ │ self._tasks_outstanding -= 1 │\n│ 1330 │ │ │ if self._dataset_kind == _DatasetKind.Iterable: │\n│ 1331 │ │ │ │ # Check for _IterableDatasetStopIteration │\n│ │\n│ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1294 in _get_data │\n│ │\n│ 1291 │ │ │ # need to call `.task_done()` because we don't use `.join()`. │\n│ 1292 │ │ else: │\n│ 1293 │ │ │ while True: │\n│ ❱ 1294 │ │ │ │ success, data = self._try_get_data() │\n│ 1295 │ │ │ │ if success: │\n│ 1296 │ │ │ │ │ return data │\n│ 1297 │\n│ │\n│ /opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:1132 in _try_get_data │\n│ │\n│ 1129 │ │ # Returns a 2-tuple: │\n│ 1130 │ │ # (bool: whether successfully get data, any: data if successful else None) │\n│ 1131 │ │ try: │\n│ ❱ 1132 │ │ │ data = self._data_queue.get(timeout=timeout) │\n│ 1133 │ │ │ return (True, data) │\n│ 1134 │ │ except Exception as e: │\n│ 1135 │ │ │ # At timeout and error, we manually check whether any worker has │\n│ │\n│ /opt/conda/lib/python3.10/multiprocessing/queues.py:113 in get │\n│ │\n│ 110 │ │ │ try: │\n│ 111 │ │ │ │ if block: │\n│ 112 │ │ │ │ │ timeout = deadline - time.monotonic() │\n│ ❱ 113 │ │ │ │ │ if not self._poll(timeout): │\n│ 114 │ │ │ │ │ │ raise Empty │\n│ 115 │ │ │ │ elif not self._poll(): │\n│ 116 │ │ │ │ │ raise Empty │\n│ │\n│ /opt/conda/lib/python3.10/multiprocessing/connection.py:257 in poll │\n│ │\n│ 254 │ │ \"\"\"Whether there 
is any input available to be read\"\"\" │\n│ 255 │ │ self._check_closed() │\n│ 256 │ │ self._check_readable() │\n│ ❱ 257 │ │ return self._poll(timeout) │\n│ 258 │ │\n│ 259 │ def __enter__(self): │\n│ 260 │ │ return self │\n│ │\n│ /opt/conda/lib/python3.10/multiprocessing/connection.py:424 in _poll │\n│ │\n│ 421 │ │ return self._recv(size) │\n│ 422 │ │\n│ 423 │ def _poll(self, timeout): │\n│ ❱ 424 │ │ r = wait([self], timeout) │\n│ 425 │ │ return bool(r) │\n│ 426 │\n│ 427 │\n│ │\n│ /opt/conda/lib/python3.10/multiprocessing/connection.py:931 in wait │\n│ │\n│ 928 │ │ │ │ deadline = time.monotonic() + timeout │\n│ 929 │ │ │ │\n│ 930 │ │ │ while True: │\n│ ❱ 931 │ │ │ │ ready = selector.select(timeout) │\n│ 932 │ │ │ │ if ready: │\n│ 933 │ │ │ │ │ return [key.fileobj for (key, events) in ready] │\n│ 934 │ │ │ │ else: │\n│ │\n│ /opt/conda/lib/python3.10/selectors.py:416 in select │\n│ │\n│ 413 │ │ │ timeout = math.ceil(timeout * 1e3) │\n│ 414 │ │ ready = [] │\n│ 415 │ │ try: │\n│ ❱ 416 │ │ │ fd_event_list = self._selector.poll(timeout) │\n│ 417 │ │ except InterruptedError: │\n│ 418 │ │ │ return ready │\n│ 419 │ │ for fd, event in fd_event_list: │\n╰──────────────────────────────────────────────────────────────────────────────────────────────────╯\nKeyboardInterrupt\n\n\n\n\nfor x in train_ds:\n print(x[3])\n break\n\ntensor([[ 464, 7664, 286, ..., -100, -100, -100],\n [ 2953, 717, 612, ..., -100, -100, -100],\n [25383, 339, 587, ..., -100, -100, -100],\n ...,\n [ 392, 340, 880, ..., -100, -100, -100],\n [ 464, 31526, 11416, ..., -100, -100, -100],\n [ 2202, 262, 16720, ..., -100, -100, -100]])" + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_006-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "href": "2b. 
whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_006-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "title": "Distill Whisper with a VQ bottleneck", + "section": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_006 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar", + "text": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_006 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nPhysically I was incapable of complying with the command, and mentally I had not the slightest intention of departing. In an outhouse devoted to storing melees, sheepskins, and harness, an old man was sitting on the doorstep, compounding a mixture which I recognized as a sheep remedy." + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_009-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "href": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_009-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "title": "Distill Whisper with a VQ bottleneck", + "section": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_009 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar", + "text": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_009 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nThe following day I was the most surprised man in South Africa when I learned that my preparation was working a marvelous cure. I was invited to remain with the bore the balance of the season as an honoured guest. 
Day after day I tramped the hills, returning at night as wise and as rich as when I set out. There were unmistakable indications that gold should be found in the vicinity, but the stubborn fact remained that I could not find it." + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_001-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "href": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_001-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "title": "Distill Whisper with a VQ bottleneck", + "section": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_001 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar", + "text": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_001 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nI was one of the first prospectors in the Transvaal to search for gold and a precious dance it led me. At that time, but few Englishmen had ventured into the Boer country, and such was the jealousy with which they were regarded that it was impossible to secure any information which would assist in the search. Footsoir and weary, I tramped from farm to farm, content" + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_032-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "href": "2b. 
whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_032-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "title": "Distill Whisper with a VQ bottleneck", + "section": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_032 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar", + "text": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_032 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nDead, more than twenty years. In fact, before I was married and came to live here, for he was my husband’s father. Did you know him? Yes, but I was only a little girl at the time. Why have the clothes been kept?" + }, + { + "objectID": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_004-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "href": "2b. whisper quantization (semantic token) model.html#large6454kaffirkangarooklondiketales_1611_librivox_64kb_mp3kaffirkangaroo_03_leavitt_64kb_004-from-datawhisperspeech-wdslibrilight-large-6454-flac-000007.tar", + "title": "Distill Whisper with a VQ bottleneck", + "section": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_004 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar", + "text": "large/6454/kaffirkangarooklondiketales_1611_librivox_64kb_mp3/kaffirkangaroo_03_leavitt_64kb_004 from /data/whisperspeech-wds/librilight-large-6454-flac-000007.tar\nFortunately, I had acquired some knowledge of sheep in Australia else I believe that I should have starved. When all else failed, I became a sheep doctor and then did a compound whose virtues would have done credit to the most widely advertised path and medicine nostrum." + }, + { + "objectID": "2b. 
whisper quantization (semantic token) model.html#architectural-experiments", + "href": "2b. whisper quantization (semantic token) model.html#architectural-experiments", + "title": "Distill Whisper with a VQ bottleneck", + "section": "Architectural experiments", + "text": "Architectural experiments\n\n# with learned positional embeddings, no out_blocks\nvqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6290 1\n\n\n\n\n\n'Entropy: 8.71'\n\n\n\n\n\n\n\n\n \n \n 0.00% [0/1 00:00<?]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n107.56952\n157.32113\n8.71\n05:24\n\n\n100000\n85.44750\n101.79171\n8.70\n10:37\n\n\n126688\n81.44776\n104.25017\n8.71\n13:27\n\n\n\n\n\n \n \n 62.94% [3959/6290 13:26<07:54 #126688/201280 loss: 81.448 / 104.250]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. 
Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# with learned positional embeddings, out_blocks before positional\nvqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6290 1\n\n\n\n\n\n'Entropy: 8.70'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 22:57<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n23.45991\n42.24113\n8.80\n05:48\n\n\n100000\n16.19686\n23.67809\n8.78\n11:27\n\n\n150016\n11.99028\n17.22306\n8.74\n17:07\n\n\n200000\n11.68037\n16.67605\n8.70\n22:46\n\n\n201280\n11.92631\n16.65236\n8.70\n22:57\n\n\n\n\n\n \n \n 100.00% [6290/6290 22:57<00:00 #201280/201280 loss: 11.926 / 16.652]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6290 batches x 32 samples, 1307.9 hours) was reported to be 6290 (when accessing len(dataloader)), but 6291 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. 
Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# with learned positional embeddings, out_blocks before positional, mlp before vq\nvqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6290 1\n\n\n\n\n\n'Entropy: 8.57'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 23:09<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n24.63220\n44.67238\n8.74\n05:53\n\n\n100000\n14.69983\n19.67298\n8.67\n11:35\n\n\n150016\n11.50774\n17.75203\n8.58\n17:16\n\n\n200000\n11.33895\n15.66892\n8.55\n22:58\n\n\n201280\n10.87422\n15.81362\n8.57\n23:09\n\n\n\n\n\n \n \n 100.00% [6290/6290 23:08<00:00 #201280/201280 loss: 10.874 / 15.814]\n \n \n\n\n\n\n\n\n\n\n\n\n\n# with learned positional embeddings, out_blocks after positional, mlp before vq\nvqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=1, lr=3e-3, warmup_steps=1000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6290 1\n\n\n\n\n\n'Entropy: 8.54'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 23:11<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.37899\n27.54997\n8.65\n05:53\n\n\n100000\n13.13329\n17.32240\n8.60\n11:35\n\n\n150016\n10.83435\n13.55371\n8.56\n17:18\n\n\n200000\n9.69492\n12.35855\n8.51\n23:00\n\n\n201280\n10.54271\n12.43994\n8.54\n23:11\n\n\n\n\n\n \n \n 100.00% [6290/6290 23:11<00:00 #201280/201280 loss: 10.543 / 12.440]\n \n 
\n\n\n\n\n\n\n\n\n\n\n\n# with learned positional embeddings, out_blocks after positional, mlp before vq\nvqmodel = RQBottleneckTransformer(codebook_dim=16, vq_codes=512, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-2d-512c-cosine-padfix-premlp-learnpos-5e.model')\n\nOneCycle: 6290 5\n\n\n\n\n\n'Entropy: 8.40'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 1:55:58<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n24.24790\n47.61960\n8.62\n05:53\n\n\n100000\n14.35983\n18.50102\n8.55\n11:35\n\n\n150016\n12.35634\n16.84217\n8.56\n17:18\n\n\n200000\n11.74603\n16.10603\n8.52\n23:00\n\n\n250016\n10.85323\n14.83014\n8.49\n28:56\n\n\n300000\n10.78046\n14.04290\n8.47\n34:38\n\n\n350016\n10.05354\n12.98133\n8.40\n40:21\n\n\n400000\n9.59631\n13.78049\n8.50\n46:03\n\n\n450016\n9.22316\n12.76403\n8.40\n51:57\n\n\n500000\n9.38958\n11.96084\n8.46\n57:40\n\n\n550016\n8.36034\n12.59843\n8.35\n1:03:22\n\n\n600000\n9.39242\n11.55411\n8.43\n1:09:05\n\n\n650016\n8.30749\n10.80241\n8.42\n1:15:02\n\n\n700000\n8.20436\n10.39852\n8.48\n1:20:45\n\n\n750016\n8.21392\n10.36367\n8.41\n1:26:27\n\n\n800000\n7.73189\n11.21438\n8.48\n1:32:10\n\n\n850016\n7.64852\n10.93893\n8.47\n1:38:06\n\n\n900000\n7.72010\n10.49391\n8.39\n1:43:48\n\n\n950016\n7.58901\n9.85925\n8.42\n1:49:31\n\n\n1000000\n7.14871\n10.67987\n8.40\n1:55:14\n\n\n1006400\n6.73056\n10.67323\n8.40\n1:55:58\n\n\n\n\n\n \n \n 100.00% [6290/6290 23:12<00:00 #201280/201280 loss: 6.731 / 10.673]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. 
For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6290 batches x 32 samples, 1307.9 hours) was reported to be 6290 (when accessing len(dataloader)), but 6291 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. 
Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# with learned positional embeddings, out_blocks after positional, mlp before vq\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=6, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True).cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=32, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model')\n\nOneCycle: 6290 5\n\n\n\n\n\n'Entropy: 11.07'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 1:57:58<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n15.49718\n26.42581\n11.23\n06:00\n\n\n100000\n11.36006\n14.78076\n11.25\n11:48\n\n\n150016\n10.29752\n13.68974\n11.19\n17:36\n\n\n200000\n9.22019\n12.14817\n11.26\n23:24\n\n\n250016\n9.09067\n13.16928\n11.17\n29:26\n\n\n300000\n8.56113\n12.38342\n11.13\n35:14\n\n\n350016\n8.30965\n12.02589\n11.15\n41:02\n\n\n400000\n7.76135\n10.97900\n11.14\n46:50\n\n\n450016\n7.34585\n10.10667\n11.11\n52:53\n\n\n500000\n7.65255\n11.02440\n11.10\n58:41\n\n\n550016\n7.47726\n10.73619\n11.10\n1:04:29\n\n\n600000\n6.96974\n9.63206\n11.14\n1:10:17\n\n\n650016\n6.93395\n9.97940\n11.08\n1:16:19\n\n\n700000\n6.64507\n8.91945\n11.13\n1:22:07\n\n\n750016\n6.53036\n9.27800\n11.01\n1:27:55\n\n\n800000\n6.50427\n8.30845\n11.07\n1:33:44\n\n\n850016\n6.51113\n9.09502\n11.12\n1:39:48\n\n\n900000\n6.05660\n8.44461\n10.99\n1:45:36\n\n\n950016\n6.20974\n8.88156\n11.06\n1:51:25\n\n\n1000000\n5.95045\n8.69922\n11.08\n1:57:13\n\n\n1006400\n6.18939\n8.88604\n11.07\n1:57:58\n\n\n\n\n\n \n \n 100.00% [6290/6290 23:37<00:00 #201280/201280 loss: 6.189 / 8.886]\n \n \n\n\n\n\n\n\n\n\n\n\n\n# base.en Whisper with learned positional embeddings, out_blocks after positional, mlp before 
vq\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e.model')\n\nOneCycle: 6280 5\n\n\n\n\n\n'Entropy: 10.86'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:05:51<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.17899\n27.83681\n11.11\n09:23\n\n\n100000\n13.50658\n17.32206\n11.06\n18:34\n\n\n150016\n12.10491\n15.49411\n11.08\n27:47\n\n\n200000\n11.84169\n15.30570\n10.95\n36:58\n\n\n250016\n11.19514\n14.05272\n10.99\n46:23\n\n\n300000\n10.98578\n13.69234\n10.86\n55:34\n\n\n350016\n10.58517\n13.25610\n10.99\n1:04:46\n\n\n400000\n9.87159\n12.88844\n10.91\n1:13:57\n\n\n450016\n9.76353\n12.50161\n10.92\n1:23:22\n\n\n500000\n10.08099\n12.71940\n10.94\n1:32:33\n\n\n550016\n9.85388\n12.70232\n10.89\n1:41:45\n\n\n600000\n10.50843\n11.94505\n10.93\n1:50:57\n\n\n650016\n9.29321\n12.16166\n10.96\n2:00:20\n\n\n700000\n9.24717\n11.35387\n10.93\n2:09:32\n\n\n750016\n8.80798\n11.78821\n10.95\n2:18:43\n\n\n800000\n9.14499\n10.97496\n10.93\n2:27:55\n\n\n850016\n8.75328\n11.08632\n10.96\n2:37:21\n\n\n900000\n8.40084\n10.79851\n10.88\n2:46:33\n\n\n950016\n8.73481\n11.27116\n10.96\n2:55:45\n\n\n1000000\n8.55846\n11.28967\n10.86\n3:04:57\n\n\n1004800\n8.09170\n11.12924\n10.86\n3:05:51\n\n\n\n\n\n \n \n 100.00% [6280/6280 37:12<00:00 #200960/200960 loss: 8.092 / 11.129]\n \n \n\n\n\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 11 samples have been fetched. 
For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 10 batches x 32 samples, 1.9 hours) was reported to be 10 (when accessing len(dataloader)), but 12 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6280 batches x 32 samples, 1306.1 hours) was reported to be 6280 (when accessing len(dataloader)), but 6281 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. 
Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset (removed 1st and last segments)\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 10.79'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:09:42<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n19.44056\n22.67257\n11.13\n09:46\n\n\n100000\n13.55178\n14.58443\n11.26\n19:23\n\n\n150016\n11.96837\n13.18968\n11.09\n29:00\n\n\n200000\n11.43871\n12.44640\n11.05\n38:49\n\n\n250016\n11.28360\n11.70081\n11.10\n48:26\n\n\n300000\n10.83751\n11.31110\n11.09\n58:03\n\n\n350016\n10.69315\n11.17086\n11.12\n1:07:40\n\n\n400000\n9.98770\n10.92539\n11.05\n1:17:30\n\n\n450016\n9.83174\n10.69181\n11.05\n1:27:07\n\n\n500000\n9.77236\n10.48352\n11.14\n1:36:44\n\n\n550016\n9.66632\n10.36597\n11.09\n1:46:21\n\n\n600000\n9.40930\n10.08656\n11.02\n1:56:09\n\n\n650016\n9.44357\n9.92484\n11.04\n2:05:46\n\n\n700000\n8.96556\n9.79054\n11.06\n2:15:23\n\n\n750016\n8.83601\n9.65099\n11.01\n2:25:00\n\n\n800000\n8.66107\n9.39148\n11.12\n2:34:48\n\n\n850016\n8.44581\n9.40969\n11.00\n2:44:26\n\n\n900000\n8.56439\n9.22455\n11.05\n2:54:03\n\n\n950016\n8.52489\n9.30351\n11.03\n3:03:40\n\n\n981120\n8.84632\n9.33108\n10.79\n3:09:42\n\n\n\n\n\n \n \n 100.00% [6132/6132 37:57<00:00 #196224/196224 loss: 8.846 / 9.331]\n \n \n\n\n\n/tmp/ipykernel_90303/1747892456.py:43: 
UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_90303/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=1024, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=12, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-1024c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 9.36'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:08:14<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook 
entropy\ntime\n\n\n\n\n50016\n21.66206\n27.27091\n9.59\n09:41\n\n\n100000\n15.25066\n16.20915\n9.53\n19:13\n\n\n150016\n13.21848\n14.25581\n9.54\n28:45\n\n\n200000\n11.82871\n13.98582\n9.49\n38:30\n\n\n250016\n11.85884\n13.12596\n9.42\n48:02\n\n\n300000\n11.54107\n12.60187\n9.43\n57:34\n\n\n350016\n11.45310\n12.29700\n9.46\n1:07:07\n\n\n400000\n11.08207\n11.98462\n9.38\n1:16:51\n\n\n450016\n10.65160\n11.61482\n9.44\n1:26:24\n\n\n500000\n10.69448\n11.57619\n9.34\n1:35:56\n\n\n550016\n10.25768\n11.15084\n9.38\n1:45:29\n\n\n600000\n9.86860\n10.86430\n9.48\n1:55:14\n\n\n650016\n9.90988\n10.71315\n9.44\n2:04:47\n\n\n700000\n9.53233\n10.52028\n9.42\n2:14:19\n\n\n750016\n9.89578\n10.26827\n9.36\n2:23:52\n\n\n800000\n9.15078\n10.15152\n9.42\n2:33:36\n\n\n850016\n9.16481\n9.96554\n9.34\n2:43:09\n\n\n900000\n9.14512\n9.90501\n9.40\n2:52:42\n\n\n950016\n9.18524\n9.92719\n9.36\n3:02:15\n\n\n981120\n8.97033\n9.95517\n9.36\n3:08:14\n\n\n\n\n\n \n \n 100.00% [6132/6132 37:41<00:00 #196224/196224 loss: 8.970 / 9.955]\n \n \n\n\n\n/tmp/ipykernel_90303/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_90303/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en whisper with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=64, q_depth=1, n_head=8, depth=1,\n downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-64c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 
5.64'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:09:51<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n76.17780\n192.67165\n5.82\n09:48\n\n\n100000\n27.85803\n31.11143\n5.71\n19:25\n\n\n150016\n19.38920\n22.02595\n5.75\n29:02\n\n\n200000\n16.75521\n18.75611\n5.68\n38:51\n\n\n250016\n16.22832\n17.68415\n5.60\n48:29\n\n\n300000\n15.28871\n16.20028\n5.68\n58:06\n\n\n350016\n14.91663\n16.24565\n5.63\n1:07:43\n\n\n400000\n14.08824\n15.30097\n5.64\n1:17:32\n\n\n450016\n13.53690\n15.08575\n5.61\n1:27:10\n\n\n500000\n13.62558\n14.45319\n5.65\n1:36:47\n\n\n550016\n12.45450\n13.74045\n5.66\n1:46:25\n\n\n600000\n12.25172\n14.05763\n5.68\n1:56:14\n\n\n650016\n12.76195\n13.71730\n5.69\n2:05:51\n\n\n700000\n12.19483\n13.02070\n5.61\n2:15:28\n\n\n750016\n11.83110\n12.79714\n5.62\n2:25:06\n\n\n800000\n12.23673\n12.70706\n5.73\n2:34:56\n\n\n850016\n11.69901\n12.50606\n5.64\n2:44:34\n\n\n900000\n12.03180\n12.29434\n5.71\n2:54:11\n\n\n950016\n12.06521\n12.22985\n5.67\n3:03:49\n\n\n981120\n13.17802\n12.70389\n5.64\n3:09:51\n\n\n\n\n\n \n \n 100.00% [6132/6132 38:00<00:00 #196224/196224 loss: 13.178 / 12.704]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. 
Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=512, q_depth=1, n_head=8, depth=1,\n downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=12, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 8.44'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:10:13<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n21.94018\n27.54010\n8.70\n09:48\n\n\n100000\n15.30265\n16.38729\n8.72\n19:26\n\n\n150016\n13.55491\n14.22489\n8.67\n29:04\n\n\n200000\n12.27958\n13.59388\n8.53\n38:54\n\n\n250016\n11.48394\n12.79483\n8.59\n48:33\n\n\n300000\n11.45791\n12.34518\n8.52\n58:11\n\n\n350016\n11.51288\n11.73254\n8.54\n1:07:49\n\n\n400000\n11.04880\n11.61340\n8.44\n1:17:41\n\n\n450016\n10.74074\n11.15114\n8.51\n1:27:20\n\n\n500000\n10.22759\n11.11760\n8.52\n1:36:59\n\n\n550016\n10.23485\n10.82111\n8.45\n1:46:38\n\n\n600000\n9.62602\n10.52901\n8.48\n1:56:30\n\n\n650016\n9.54247\n10.39591\n8.40\n2:06:08\n\n\n700000\n9.27610\n10.17579\n8.41\n2:15:47\n\n\n750016\n9.39848\n10.03072\n8.46\n2:25:25\n\n\n800000\n8.95939\n9.87603\n8.49\n2:35:15\n\n\n850016\n9.08446\n9.74571\n8.47\n2:44:54\n\n\n900000\n8.76172\n9.79162\n8.43\n2:54:32\n\n\n950016\n9.12931\n9.58630\n8.47\n3:04:10\n\n\n981120\n9.33700\n9.72177\n8.44\n3:10:13\n\n\n\n\n\n \n \n 100.00% [6132/6132 38:02<00:00 #196224/196224 loss: 9.337 / 9.722]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a 
log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=64, vq_codes=512, q_depth=1, n_head=8, depth=1,\n downsample=1, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 8.55'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 38:00<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n24.54137\n31.36435\n8.57\n09:47\n\n\n100000\n15.90889\n17.09020\n8.58\n19:26\n\n\n150016\n13.30405\n13.95759\n8.51\n29:05\n\n\n196224\n14.19891\n12.88708\n8.55\n38:00\n\n\n\n\n\n \n \n 100.00% [6132/6132 38:00<00:00 #196224/196224 loss: 14.199 / 12.887]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 11.28'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 37:54<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n17.26417\n22.29299\n11.24\n09:45\n\n\n100000\n12.41381\n14.22859\n11.25\n19:22\n\n\n150016\n11.16801\n11.97096\n11.21\n29:00\n\n\n196224\n10.49819\n10.57301\n11.28\n37:54\n\n\n\n\n\n \n \n 100.00% [6132/6132 37:54<00:00 #196224/196224 loss: 10.498 / 10.573]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-preconv-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 10.75'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:11:21<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.85334\n22.89696\n10.80\n09:51\n\n\n100000\n13.86454\n16.37101\n10.73\n19:33\n\n\n150016\n12.85605\n13.55042\n10.70\n29:15\n\n\n200000\n11.59676\n12.87997\n10.70\n39:09\n\n\n250016\n11.12804\n12.39809\n10.76\n48:52\n\n\n300000\n11.10460\n11.67927\n10.78\n58:33\n\n\n350016\n11.11719\n11.55583\n10.77\n1:08:16\n\n\n400000\n10.57183\n11.07552\n10.69\n1:18:09\n\n\n450016\n10.49243\n10.82820\n10.79\n1:27:51\n\n\n500000\n10.20853\n10.77793\n10.81\n1:37:33\n\n\n550016\n10.11812\n10.54805\n10.73\n1:47:15\n\n\n600000\n9.56493\n10.22062\n10.77\n1:57:10\n\n\n650016\n9.40594\n10.19217\n10.68\n2:06:52\n\n\n700000\n9.17259\n9.85726\n10.74\n2:16:34\n\n\n750016\n9.18224\n9.74915\n10.68\n2:26:17\n\n\n800000\n8.92105\n9.47104\n10.70\n2:36:09\n\n\n850016\n8.61280\n9.39290\n10.71\n2:45:51\n\n\n900000\n8.43418\n9.33166\n10.72\n2:55:33\n\n\n950016\n8.57911\n9.33823\n10.71\n3:05:16\n\n\n981120\n8.63924\n9.37749\n10.75\n3:11:21\n\n\n\n\n\n \n \n 100.00% [6132/6132 38:16<00:00 #196224/196224 loss: 8.639 / 9.377]\n \n \n\n\n\n/tmp/ipykernel_100642/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_100642/1747892456.py:46: UserWarning: Attempt to set non-positive 
xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\nIOPub message rate exceeded.\nThe notebook server will temporarily stop sending output\nto the client in order to avoid crashing it.\nTo change this limit, set the config variable\n`--NotebookApp.iopub_msg_rate_limit`.\n\nCurrent values:\nNotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\nNotebookApp.rate_limit_window=3.0 (secs)\n\n\n\n\n\n\n\n\n\n\n\n# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset, mean downsampling\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 5\n\n\n\n\n\n'Entropy: 10.87'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 3:09:50<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook 
entropy\ntime\n\n\n\n\n50016\n17.48580\n22.87051\n10.93\n09:49\n\n\n100000\n13.30088\n14.67394\n11.07\n19:26\n\n\n150016\n12.26683\n12.99752\n10.98\n29:04\n\n\n200000\n11.53840\n12.33599\n10.96\n38:53\n\n\n250016\n10.86994\n12.00824\n11.01\n48:30\n\n\n300000\n10.59976\n11.63654\n11.01\n58:08\n\n\n350016\n10.76181\n11.29659\n10.93\n1:07:45\n\n\n400000\n9.99428\n10.90412\n10.98\n1:17:35\n\n\n450016\n9.78972\n10.65274\n10.92\n1:27:13\n\n\n500000\n9.70262\n10.54080\n10.93\n1:36:50\n\n\n550016\n9.86663\n10.32896\n10.96\n1:46:28\n\n\n600000\n9.41082\n10.16734\n10.97\n1:56:16\n\n\n650016\n9.54473\n9.94173\n10.96\n2:05:53\n\n\n700000\n9.06406\n9.71947\n10.93\n2:15:30\n\n\n750016\n9.10101\n9.46919\n10.93\n2:25:08\n\n\n800000\n8.60536\n9.40041\n10.94\n2:34:56\n\n\n850016\n8.50216\n9.23997\n10.89\n2:44:34\n\n\n900000\n8.29970\n9.23626\n10.90\n2:54:11\n\n\n950016\n8.52151\n9.20892\n10.93\n3:03:48\n\n\n981120\n8.69804\n9.14721\n10.87\n3:09:50\n\n\n\n\n\n \n \n 100.00% [6132/6132 37:58<00:00 #196224/196224 loss: 8.698 / 9.147]\n \n \n\n\n\n/tmp/ipykernel_129075/774804256.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_129075/774804256.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset, mean downsampling\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\nvqmodel.ensure_whisper()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=16, visual_class=RQVisual)\n\n\n\n\n'Entropy: 10.91'\n\n\n\n\n\n\n\n\n \n \n 0.00% [0/5 00:00<?]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50008\n15.93577\n18.26651\n10.88\n31:51\n\n\n71736\n14.07252\n15.22314\n10.91\n57:51\n\n\n\n\n\n \n \n 35.23% [5124/14546 57:50<1:46:20 #14348/203648 loss: 14.073 / 15.223]\n \n \n\n\n\n\n\n\n\n\n\n\n\n# base.en! with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset, mean downsampling\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\nvqmodel.ensure_whisper()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=8, visual_class=RQVisual)\n#vqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned.model')\n\n\n\n\n'Entropy: 10.75'\n\n\n\n\n\n\n\n\n \n \n 20.00% [1/5 30:53<2:03:32]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50008\n17.99252\n21.13446\n10.86\n07:13\n\n\n100002\n14.73851\n15.26074\n10.74\n14:30\n\n\n150010\n12.67679\n13.50757\n10.61\n22:25\n\n\n200004\n11.98636\n12.63929\n10.72\n30:13\n\n\n248374\n12.14378\n12.26164\n10.75\n37:45\n\n\n\n\n\n \n \n 22.25% [3236/14546 06:51<23:57 #49675/203648 loss: 12.144 / 12.262]\n \n \n\n\n\n\n\n\n\n\n\n\n\n# 
base.en! with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset, mean downsampling, eqvad\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=5, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\nvqmodel.save_model('vq-base.en-2d-4096c-cosine32-padfix-premlp-premean-learnpos-5e-cleaned-eqvad.model')\n\nOneCycle: 9933 5\n\n\n\n\n\n'Entropy: 9.83'\n\n\n\n\n\n\n\n\n \n \n 100.00% [5/5 5:07:42<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.06458\n19.45549\n10.27\n09:48\n\n\n100000\n13.27705\n13.06077\n10.36\n19:27\n\n\n150016\n11.91958\n12.15395\n10.17\n29:05\n\n\n200000\n11.59404\n11.67862\n10.28\n38:44\n\n\n250016\n11.44242\n11.32514\n10.16\n48:22\n\n\n300000\n10.80200\n11.16721\n10.17\n58:01\n\n\n350016\n10.78535\n10.94168\n10.32\n1:07:53\n\n\n400000\n10.66275\n10.93297\n10.21\n1:17:32\n\n\n450016\n11.32866\n10.82697\n10.23\n1:27:11\n\n\n500000\n10.40007\n10.87806\n10.05\n1:36:50\n\n\n550016\n10.74838\n10.63030\n10.02\n1:46:30\n\n\n600000\n10.57567\n10.58560\n9.97\n1:56:08\n\n\n650016\n10.26159\n10.44148\n10.19\n2:06:01\n\n\n700000\n10.08803\n10.51371\n10.12\n2:15:40\n\n\n750016\n10.02600\n10.39278\n9.97\n2:25:19\n\n\n800000\n10.27624\n10.39350\n10.06\n2:34:58\n\n\n850016\n10.19159\n10.25763\n9.81\n2:44:37\n\n\n900000\n10.08171\n10.23527\n10.00\n2:54:16\n\n\n950016\n9.88339\n10.25396\n9.92\n3:03:55\n\n\n1000000\n9.62146\n10.11803\n10.06\n3:13:46\n\n\n1050016\n9.46334\n10.04561\n9.84\n3:23:25\n\n\n1100000\n9.51465\n10.11484\n9.79\n3:33:04\n\n\n1150016\n9.50131\n9.95828\n9.79\n3:42:43\n\n\n1200000\n9.53149\n9.94314\n9.89\n3:52:22\n\n\n1250016\n9.33688\n9.85693\n9.80\n4:02:01\n\n\n1300000\n9.26627\n9.81014\n9.75\n4:
11:53\n\n\n1350016\n9.37144\n9.76661\n9.77\n4:21:32\n\n\n1400000\n9.06240\n9.80434\n9.76\n4:31:11\n\n\n1450016\n9.10573\n9.80284\n9.77\n4:40:50\n\n\n1500000\n9.01136\n9.71748\n9.74\n4:50:29\n\n\n1550016\n9.15775\n9.71512\n9.85\n5:00:08\n\n\n1589280\n9.26362\n9.71802\n9.83\n5:07:42\n\n\n\n\n\n \n \n 100.00% [9933/9933 1:01:29<00:00 #317856/317856 loss: 9.264 / 9.718]\n \n \n\n\n\n/tmp/ipykernel_133489/774804256.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_133489/774804256.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 9933 batches x 32 samples, 1275.0 hours) was reported to be 9933 (when accessing len(dataloader)), but 9934 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\n# downsample conv\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 10.70'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 38:13<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.56527\n21.86226\n10.70\n09:50\n\n\n100000\n14.16297\n14.83381\n10.66\n19:32\n\n\n150016\n11.57994\n12.28649\n10.68\n29:14\n\n\n196224\n10.27239\n10.96855\n10.70\n38:13\n\n\n\n\n\n \n \n 100.00% [6132/6132 38:13<00:00 #196224/196224 loss: 10.272 / 10.969]\n \n \n\n\n\n/tmp/ipykernel_100642/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_100642/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n/opt/conda/lib/python3.10/site-packages/torch/utils/data/dataloader.py:645: UserWarning: Length of IterableDataset Dataset: 6132 batches x 32 samples, 1277.7 hours) was reported to be 6132 (when accessing len(dataloader)), but 6133 samples have been fetched. For multiprocessing data-loading, this could be caused by not properly configuring the IterableDataset replica at each worker. Please see https://pytorch.org/docs/stable/data.html#torch.utils.data.IterableDataset for examples.\n warnings.warn(warn_msg)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=64, vq_codes=4096, q_depth=1, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 10.14'\n\n\n\n\n\n\n\n\n \n \n 0.00% [0/1 00:00<?]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n19.88679\n26.18120\n10.21\n09:49\n\n\n100000\n14.04911\n15.88962\n10.19\n19:26\n\n\n107520\n13.98125\n15.41472\n10.14\n20:55\n\n\n\n\n\n \n \n 54.79% [3360/6132 20:54<17:14 #107520/196224 loss: 13.981 / 15.415]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=4096, q_depth=1, n_head=8, depth=2,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 11.10'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 40:03<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n18.68695\n25.23358\n11.06\n10:18\n\n\n100000\n13.17344\n14.20349\n11.11\n20:28\n\n\n150016\n10.66736\n11.51643\n11.02\n30:39\n\n\n196224\n9.68099\n10.36363\n11.10\n40:03\n\n\n\n\n\n \n \n 100.00% [6132/6132 40:03<00:00 #196224/196224 loss: 9.681 / 10.364]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)\n\n\n\n\n\n\n\n\n\n\n# base.en! 
with learned positional embeddings, out_blocks after positional, mlp before vq\n# cleaned dataset\nvqmodel = RQBottleneckTransformer(codebook_dim=32, vq_codes=64, q_depth=2, n_head=8, depth=1,\n downsample=2, threshold_ema_dead_code=0, use_cosine_sim=True, whisper_model_name=\"base.en\").cuda()\ntrain(\"svq\", vqmodel, train_ds, val_ds, bs=14, epochs=1, lr=3e-3, warmup_steps=2000,\n run_valid_every_iters=10000, table_row_every_iters=50000, dl_workers=4, visual_class=RQVisual)\n#vqmodel.save_model('vq-base.en-512c-cosine32-padfix-premlp-learnpos-5e-cleaned.model')\n\nOneCycle: 6132 1\n\n\n\n\n\n'Entropy: 5.65'\n\n\n\n\n\n\n\n\n \n \n 100.00% [1/1 37:35<00:00]\n \n \n\n\n\n\nsamples\ntrain\nval\ncodebook entropy\ntime\n\n\n\n\n50016\n82.99027\n173.42301\n5.91\n09:42\n\n\n100000\n31.85972\n36.78515\n5.81\n19:14\n\n\n150016\n23.16688\n25.48340\n5.76\n28:46\n\n\n196224\n20.68511\n23.00216\n5.65\n37:36\n\n\n\n\n\n \n \n 100.00% [6132/6132 37:35<00:00 #196224/196224 loss: 20.685 / 23.002]\n \n \n\n\n\n/tmp/ipykernel_94907/1747892456.py:43: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.clear()\n/tmp/ipykernel_94907/1747892456.py:46: UserWarning: Attempt to set non-positive xlim on a log-scaled axis will be ignored.\n loss_p.set_xlim(10000, self.total_steps)" + }, + { + "objectID": "C. Word error rate metrics.html", + "href": "C. Word error rate metrics.html", + "title": "WhisperSpeech", + "section": "", + "text": "The autoreload extension is already loaded. To reload it, use:\n %reload_ext autoreload\n\n\n\ndefault_transform([\"Footnote, Somber Tashan, May 12, 1856\", \"FOOTNOTE SUMNER TO SHANNON MAY TWELFTH EIGHTEEN FIFTY SIX\"])\n\n[['footnote', 'somber', 'tashan', 'may', '12', '1856'],\n ['footnote', 'sumner', 'to', 'shannon', 'may', '12th', '1856']]\n\n\n\nsource\n\nlibrispeech_data\n\n librispeech_data (datadir, sample_rate=16000)\n\n\nsource\n\n\nDfBuilder\n\n DfBuilder ()\n\nInitialize self. 
See help(type(self)) for accurate signature.\n\nsource\n\n\nWERStats\n\n WERStats (transform=<jiwer.transforms.Compose object at 0x7f9cae35aa90>)\n\nInitialize self. See help(type(self)) for accurate signature." + }, + { + "objectID": "A. Neural modules.html", + "href": "A. Neural modules.html", + "title": "WhisperSpeech", + "section": "", + "text": "source\n\ninit_transformer\n\n init_transformer (m)\n\n\nsource\n\n\nQueryHead\n\n QueryHead (in_features:int, out_features:int, bias:bool=True,\n device=None, dtype=None)\n\nApplies a linear transformation to the incoming data: :math:y = xA^T + b.\nThis module supports :ref:TensorFloat32<tf32_on_ampere>.\nOn certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.\nArgs: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True\nShape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \\text{in\\_features}. - Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \\text{out\\_features}.\nAttributes: weight: the learnable weights of the module of shape :math:(\\text{out\\_features}, \\text{in\\_features}). The values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}), where :math:k = \\frac{1}{\\text{in\\_features}} bias: the learnable bias of the module of shape :math:(\\text{out\\_features}). 
If :attr:bias is True, the values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}) where :math:k = \\frac{1}{\\text{in\\_features}}\nExamples::\n>>> m = nn.Linear(20, 30)\n>>> input = torch.randn(128, 20)\n>>> output = m(input)\n>>> print(output.size())\ntorch.Size([128, 30])\n\nsource\n\n\nLinearHead\n\n LinearHead (in_features:int, out_features:int, bias:bool=True,\n device=None, dtype=None)\n\nApplies a linear transformation to the incoming data: :math:y = xA^T + b.\nThis module supports :ref:TensorFloat32<tf32_on_ampere>.\nOn certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.\nArgs: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True\nShape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \\text{in\\_features}. - Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \\text{out\\_features}.\nAttributes: weight: the learnable weights of the module of shape :math:(\\text{out\\_features}, \\text{in\\_features}). The values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}), where :math:k = \\frac{1}{\\text{in\\_features}} bias: the learnable bias of the module of shape :math:(\\text{out\\_features}). 
If :attr:bias is True, the values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}) where :math:k = \\frac{1}{\\text{in\\_features}}\nExamples::\n>>> m = nn.Linear(20, 30)\n>>> input = torch.randn(128, 20)\n>>> output = m(input)\n>>> print(output.size())\ntorch.Size([128, 30])\n\nsource\n\n\nLayerNorm\n\n LayerNorm (normalized_shape:Union[int,List[int],torch.Size],\n eps:float=1e-05, elementwise_affine:bool=True, bias:bool=True,\n device=None, dtype=None)\n\nApplies Layer Normalization over a mini-batch of inputs.\nThis layer implements the operation as described in the paper Layer Normalization <https://arxiv.org/abs/1607.06450>__\n.. math:: y = * + \nThe mean and standard-deviation are calculated over the last D dimensions, where D is the dimension of :attr:normalized_shape. For example, if :attr:normalized_shape is (3, 5) (a 2-dimensional shape), the mean and standard-deviation are computed over the last 2 dimensions of the input (i.e. input.mean((-2, -1))). :math:\\gamma and :math:\\beta are learnable affine transform parameters of :attr:normalized_shape if :attr:elementwise_affine is True. The standard-deviation is calculated via the biased estimator, equivalent to torch.var(input, unbiased=False).\n.. note:: Unlike Batch Normalization and Instance Normalization, which applies scalar scale and bias for each entire channel/plane with the :attr:affine option, Layer Normalization applies per-element scale and bias with :attr:elementwise_affine.\nThis layer uses statistics computed from input data in both training and evaluation modes.\nArgs: normalized_shape (int or list or torch.Size): input shape from an expected input of size\n .. 
math::\n [* \\times \\text{normalized\\_shape}[0] \\times \\text{normalized\\_shape}[1]\n \\times \\ldots \\times \\text{normalized\\_shape}[-1]]\n\n If a single integer is used, it is treated as a singleton list, and this module will\n normalize over the last dimension which is expected to be of that specific size.\neps: a value added to the denominator for numerical stability. Default: 1e-5\nelementwise_affine: a boolean value that when set to ``True``, this module\n has learnable per-element affine parameters initialized to ones (for weights)\n and zeros (for biases). Default: ``True``.\nbias: If set to ``False``, the layer will not learn an additive bias (only relevant if\n :attr:`elementwise_affine` is ``True``). Default: ``True``.\nAttributes: weight: the learnable weights of the module of shape :math:\\text{normalized\\_shape} when :attr:elementwise_affine is set to True. The values are initialized to 1. bias: the learnable bias of the module of shape :math:\\text{normalized\\_shape} when :attr:elementwise_affine is set to True. The values are initialized to 0.\nShape: - Input: :math:(N, *) - Output: :math:(N, *) (same shape as input)\nExamples::\n>>> # NLP Example\n>>> batch, sentence_length, embedding_dim = 20, 5, 10\n>>> embedding = torch.randn(batch, sentence_length, embedding_dim)\n>>> layer_norm = nn.LayerNorm(embedding_dim)\n>>> # Activate module\n>>> layer_norm(embedding)\n>>>\n>>> # Image Example\n>>> N, C, H, W = 20, 5, 10, 10\n>>> input = torch.randn(N, C, H, W)\n>>> # Normalize over the last three dimensions (i.e. the channel and spatial dimensions)\n>>> # as shown in the image below\n>>> layer_norm = nn.LayerNorm([C, H, W])\n>>> output = layer_norm(input)\n.. 
image:: ../_static/img/nn/layer_norm.jpg :scale: 50 %\n\nsource\n\n\nsinusoids\n\n sinusoids (length, channels, max_timescale=10000)\n\nReturns sinusoids for positional embedding\n\nsource\n\n\nMultiHeadAttention\n\n MultiHeadAttention (n_state:int, n_head:int, qk_scale:float=1,\n rope:bool=False, cross=False)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nResidualAttentionBlock\n\n ResidualAttentionBlock (n_state:int, n_head:int,\n cross_attention:bool=False, rope:bool=False,\n qk_scale:float=1, ffn_mult:int=4)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. 
You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nBaseDecoder\n\n BaseDecoder (depth=6, n_head=6, width=384, qk_scale=1, ffn_mult=4,\n length=2250, rope=False)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. 
:vartype training: bool\n\nsource\n\n\nFlexEmbeddings\n\n FlexEmbeddings (codes, width, special_codes=None, frozen_width=None,\n special_embedding=None, unembed=True)\n\nBase class for all neural network modules.\nYour models should also subclass this class.\nModules can also contain other Modules, allowing to nest them in a tree structure. You can assign the submodules as regular attributes::\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nclass Model(nn.Module):\n def __init__(self):\n super().__init__()\n self.conv1 = nn.Conv2d(1, 20, 5)\n self.conv2 = nn.Conv2d(20, 20, 5)\n\n def forward(self, x):\n x = F.relu(self.conv1(x))\n return F.relu(self.conv2(x))\nSubmodules assigned in this way will be registered, and will have their parameters converted too when you call :meth:to, etc.\n.. note:: As per the example above, an __init__() call to the parent class must be made before assignment on the child.\n:ivar training: Boolean represents whether this module is in training or evaluation mode. :vartype training: bool\n\nsource\n\n\nEmbeddingProjector\n\n EmbeddingProjector (in_features:int, out_features:int, bias:bool=True,\n device=None, dtype=None)\n\nApplies a linear transformation to the incoming data: :math:y = xA^T + b.\nThis module supports :ref:TensorFloat32<tf32_on_ampere>.\nOn certain ROCm devices, when using float16 inputs this module will use :ref:different precision<fp16_on_mi200> for backward.\nArgs: in_features: size of each input sample out_features: size of each output sample bias: If set to False, the layer will not learn an additive bias. Default: True\nShape: - Input: :math:(*, H_{in}) where :math:* means any number of dimensions including none and :math:H_{in} = \\text{in\\_features}. 
- Output: :math:(*, H_{out}) where all but the last dimension are the same shape as the input and :math:H_{out} = \\text{out\\_features}.\nAttributes: weight: the learnable weights of the module of shape :math:(\\text{out\\_features}, \\text{in\\_features}). The values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}), where :math:k = \\frac{1}{\\text{in\\_features}} bias: the learnable bias of the module of shape :math:(\\text{out\\_features}). If :attr:bias is True, the values are initialized from :math:\\mathcal{U}(-\\sqrt{k}, \\sqrt{k}) where :math:k = \\frac{1}{\\text{in\\_features}}\nExamples::\n>>> m = nn.Linear(20, 30)\n>>> input = torch.randn(128, 20)\n>>> output = m(input)\n>>> print(output.size())\ntorch.Size([128, 30])\n\nfemb = FlexEmbeddings(2, 3, 1).half()\nwith torch.no_grad():\n femb.main.weight[:] = 0\n femb.main.weight[:,:2] = torch.eye(2)\n femb.special.weight[:] = torch.tensor([0,0,1])\nfemb.main.weight, femb.special.weight\n\n(Parameter containing:\n tensor([[1., 0., 0.],\n [0., 1., 0.]], dtype=torch.float16, requires_grad=True),\n Parameter containing:\n tensor([[0., 0., 1.]], dtype=torch.float16, requires_grad=True))\n\n\n\nembs = femb(torch.tensor([[0,2,1,0]]))\nembs\n\ntensor([[[1., 0., 0.],\n [0., 0., 1.],\n [0., 1., 0.],\n [1., 0., 0.]]], dtype=torch.float16, grad_fn=<IndexPutBackward0>)\n\n\n\nembs += femb(torch.tensor([[0]]))\n\n\nfemb.unembed(embs.float())\n\ntensor([[[2., 0., 0.],\n [1., 0., 1.],\n [1., 1., 0.],\n [2., 0., 0.]]], grad_fn=<CatBackward0>)" + } +] \ No newline at end of file diff --git a/site_libs/bootstrap/bootstrap-icons.css b/site_libs/bootstrap/bootstrap-icons.css new file mode 100644 index 0000000..285e444 --- /dev/null +++ b/site_libs/bootstrap/bootstrap-icons.css @@ -0,0 +1,2078 @@ +/*! 
+ * Bootstrap Icons v1.11.1 (https://icons.getbootstrap.com/) + * Copyright 2019-2023 The Bootstrap Authors + * Licensed under MIT (https://github.com/twbs/icons/blob/main/LICENSE) + */ + +@font-face { + font-display: block; + font-family: "bootstrap-icons"; + src: +url("./bootstrap-icons.woff?2820a3852bdb9a5832199cc61cec4e65") format("woff"); +} + +.bi::before, +[class^="bi-"]::before, +[class*=" bi-"]::before { + display: inline-block; + font-family: bootstrap-icons !important; + font-style: normal; + font-weight: normal !important; + font-variant: normal; + text-transform: none; + line-height: 1; + vertical-align: -.125em; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.bi-123::before { content: "\f67f"; } +.bi-alarm-fill::before { content: "\f101"; } +.bi-alarm::before { content: "\f102"; } +.bi-align-bottom::before { content: "\f103"; } +.bi-align-center::before { content: "\f104"; } +.bi-align-end::before { content: "\f105"; } +.bi-align-middle::before { content: "\f106"; } +.bi-align-start::before { content: "\f107"; } +.bi-align-top::before { content: "\f108"; } +.bi-alt::before { content: "\f109"; } +.bi-app-indicator::before { content: "\f10a"; } +.bi-app::before { content: "\f10b"; } +.bi-archive-fill::before { content: "\f10c"; } +.bi-archive::before { content: "\f10d"; } +.bi-arrow-90deg-down::before { content: "\f10e"; } +.bi-arrow-90deg-left::before { content: "\f10f"; } +.bi-arrow-90deg-right::before { content: "\f110"; } +.bi-arrow-90deg-up::before { content: "\f111"; } +.bi-arrow-bar-down::before { content: "\f112"; } +.bi-arrow-bar-left::before { content: "\f113"; } +.bi-arrow-bar-right::before { content: "\f114"; } +.bi-arrow-bar-up::before { content: "\f115"; } +.bi-arrow-clockwise::before { content: "\f116"; } +.bi-arrow-counterclockwise::before { content: "\f117"; } +.bi-arrow-down-circle-fill::before { content: "\f118"; } +.bi-arrow-down-circle::before { content: "\f119"; } 
+.bi-arrow-down-left-circle-fill::before { content: "\f11a"; } +.bi-arrow-down-left-circle::before { content: "\f11b"; } +.bi-arrow-down-left-square-fill::before { content: "\f11c"; } +.bi-arrow-down-left-square::before { content: "\f11d"; } +.bi-arrow-down-left::before { content: "\f11e"; } +.bi-arrow-down-right-circle-fill::before { content: "\f11f"; } +.bi-arrow-down-right-circle::before { content: "\f120"; } +.bi-arrow-down-right-square-fill::before { content: "\f121"; } +.bi-arrow-down-right-square::before { content: "\f122"; } +.bi-arrow-down-right::before { content: "\f123"; } +.bi-arrow-down-short::before { content: "\f124"; } +.bi-arrow-down-square-fill::before { content: "\f125"; } +.bi-arrow-down-square::before { content: "\f126"; } +.bi-arrow-down-up::before { content: "\f127"; } +.bi-arrow-down::before { content: "\f128"; } +.bi-arrow-left-circle-fill::before { content: "\f129"; } +.bi-arrow-left-circle::before { content: "\f12a"; } +.bi-arrow-left-right::before { content: "\f12b"; } +.bi-arrow-left-short::before { content: "\f12c"; } +.bi-arrow-left-square-fill::before { content: "\f12d"; } +.bi-arrow-left-square::before { content: "\f12e"; } +.bi-arrow-left::before { content: "\f12f"; } +.bi-arrow-repeat::before { content: "\f130"; } +.bi-arrow-return-left::before { content: "\f131"; } +.bi-arrow-return-right::before { content: "\f132"; } +.bi-arrow-right-circle-fill::before { content: "\f133"; } +.bi-arrow-right-circle::before { content: "\f134"; } +.bi-arrow-right-short::before { content: "\f135"; } +.bi-arrow-right-square-fill::before { content: "\f136"; } +.bi-arrow-right-square::before { content: "\f137"; } +.bi-arrow-right::before { content: "\f138"; } +.bi-arrow-up-circle-fill::before { content: "\f139"; } +.bi-arrow-up-circle::before { content: "\f13a"; } +.bi-arrow-up-left-circle-fill::before { content: "\f13b"; } +.bi-arrow-up-left-circle::before { content: "\f13c"; } +.bi-arrow-up-left-square-fill::before { content: "\f13d"; } 
+.bi-arrow-up-left-square::before { content: "\f13e"; } +.bi-arrow-up-left::before { content: "\f13f"; } +.bi-arrow-up-right-circle-fill::before { content: "\f140"; } +.bi-arrow-up-right-circle::before { content: "\f141"; } +.bi-arrow-up-right-square-fill::before { content: "\f142"; } +.bi-arrow-up-right-square::before { content: "\f143"; } +.bi-arrow-up-right::before { content: "\f144"; } +.bi-arrow-up-short::before { content: "\f145"; } +.bi-arrow-up-square-fill::before { content: "\f146"; } +.bi-arrow-up-square::before { content: "\f147"; } +.bi-arrow-up::before { content: "\f148"; } +.bi-arrows-angle-contract::before { content: "\f149"; } +.bi-arrows-angle-expand::before { content: "\f14a"; } +.bi-arrows-collapse::before { content: "\f14b"; } +.bi-arrows-expand::before { content: "\f14c"; } +.bi-arrows-fullscreen::before { content: "\f14d"; } +.bi-arrows-move::before { content: "\f14e"; } +.bi-aspect-ratio-fill::before { content: "\f14f"; } +.bi-aspect-ratio::before { content: "\f150"; } +.bi-asterisk::before { content: "\f151"; } +.bi-at::before { content: "\f152"; } +.bi-award-fill::before { content: "\f153"; } +.bi-award::before { content: "\f154"; } +.bi-back::before { content: "\f155"; } +.bi-backspace-fill::before { content: "\f156"; } +.bi-backspace-reverse-fill::before { content: "\f157"; } +.bi-backspace-reverse::before { content: "\f158"; } +.bi-backspace::before { content: "\f159"; } +.bi-badge-3d-fill::before { content: "\f15a"; } +.bi-badge-3d::before { content: "\f15b"; } +.bi-badge-4k-fill::before { content: "\f15c"; } +.bi-badge-4k::before { content: "\f15d"; } +.bi-badge-8k-fill::before { content: "\f15e"; } +.bi-badge-8k::before { content: "\f15f"; } +.bi-badge-ad-fill::before { content: "\f160"; } +.bi-badge-ad::before { content: "\f161"; } +.bi-badge-ar-fill::before { content: "\f162"; } +.bi-badge-ar::before { content: "\f163"; } +.bi-badge-cc-fill::before { content: "\f164"; } +.bi-badge-cc::before { content: "\f165"; } 
+.bi-badge-hd-fill::before { content: "\f166"; } +.bi-badge-hd::before { content: "\f167"; } +.bi-badge-tm-fill::before { content: "\f168"; } +.bi-badge-tm::before { content: "\f169"; } +.bi-badge-vo-fill::before { content: "\f16a"; } +.bi-badge-vo::before { content: "\f16b"; } +.bi-badge-vr-fill::before { content: "\f16c"; } +.bi-badge-vr::before { content: "\f16d"; } +.bi-badge-wc-fill::before { content: "\f16e"; } +.bi-badge-wc::before { content: "\f16f"; } +.bi-bag-check-fill::before { content: "\f170"; } +.bi-bag-check::before { content: "\f171"; } +.bi-bag-dash-fill::before { content: "\f172"; } +.bi-bag-dash::before { content: "\f173"; } +.bi-bag-fill::before { content: "\f174"; } +.bi-bag-plus-fill::before { content: "\f175"; } +.bi-bag-plus::before { content: "\f176"; } +.bi-bag-x-fill::before { content: "\f177"; } +.bi-bag-x::before { content: "\f178"; } +.bi-bag::before { content: "\f179"; } +.bi-bar-chart-fill::before { content: "\f17a"; } +.bi-bar-chart-line-fill::before { content: "\f17b"; } +.bi-bar-chart-line::before { content: "\f17c"; } +.bi-bar-chart-steps::before { content: "\f17d"; } +.bi-bar-chart::before { content: "\f17e"; } +.bi-basket-fill::before { content: "\f17f"; } +.bi-basket::before { content: "\f180"; } +.bi-basket2-fill::before { content: "\f181"; } +.bi-basket2::before { content: "\f182"; } +.bi-basket3-fill::before { content: "\f183"; } +.bi-basket3::before { content: "\f184"; } +.bi-battery-charging::before { content: "\f185"; } +.bi-battery-full::before { content: "\f186"; } +.bi-battery-half::before { content: "\f187"; } +.bi-battery::before { content: "\f188"; } +.bi-bell-fill::before { content: "\f189"; } +.bi-bell::before { content: "\f18a"; } +.bi-bezier::before { content: "\f18b"; } +.bi-bezier2::before { content: "\f18c"; } +.bi-bicycle::before { content: "\f18d"; } +.bi-binoculars-fill::before { content: "\f18e"; } +.bi-binoculars::before { content: "\f18f"; } +.bi-blockquote-left::before { content: "\f190"; } 
+.bi-blockquote-right::before { content: "\f191"; } +.bi-book-fill::before { content: "\f192"; } +.bi-book-half::before { content: "\f193"; } +.bi-book::before { content: "\f194"; } +.bi-bookmark-check-fill::before { content: "\f195"; } +.bi-bookmark-check::before { content: "\f196"; } +.bi-bookmark-dash-fill::before { content: "\f197"; } +.bi-bookmark-dash::before { content: "\f198"; } +.bi-bookmark-fill::before { content: "\f199"; } +.bi-bookmark-heart-fill::before { content: "\f19a"; } +.bi-bookmark-heart::before { content: "\f19b"; } +.bi-bookmark-plus-fill::before { content: "\f19c"; } +.bi-bookmark-plus::before { content: "\f19d"; } +.bi-bookmark-star-fill::before { content: "\f19e"; } +.bi-bookmark-star::before { content: "\f19f"; } +.bi-bookmark-x-fill::before { content: "\f1a0"; } +.bi-bookmark-x::before { content: "\f1a1"; } +.bi-bookmark::before { content: "\f1a2"; } +.bi-bookmarks-fill::before { content: "\f1a3"; } +.bi-bookmarks::before { content: "\f1a4"; } +.bi-bookshelf::before { content: "\f1a5"; } +.bi-bootstrap-fill::before { content: "\f1a6"; } +.bi-bootstrap-reboot::before { content: "\f1a7"; } +.bi-bootstrap::before { content: "\f1a8"; } +.bi-border-all::before { content: "\f1a9"; } +.bi-border-bottom::before { content: "\f1aa"; } +.bi-border-center::before { content: "\f1ab"; } +.bi-border-inner::before { content: "\f1ac"; } +.bi-border-left::before { content: "\f1ad"; } +.bi-border-middle::before { content: "\f1ae"; } +.bi-border-outer::before { content: "\f1af"; } +.bi-border-right::before { content: "\f1b0"; } +.bi-border-style::before { content: "\f1b1"; } +.bi-border-top::before { content: "\f1b2"; } +.bi-border-width::before { content: "\f1b3"; } +.bi-border::before { content: "\f1b4"; } +.bi-bounding-box-circles::before { content: "\f1b5"; } +.bi-bounding-box::before { content: "\f1b6"; } +.bi-box-arrow-down-left::before { content: "\f1b7"; } +.bi-box-arrow-down-right::before { content: "\f1b8"; } +.bi-box-arrow-down::before { content: 
"\f1b9"; } +.bi-box-arrow-in-down-left::before { content: "\f1ba"; } +.bi-box-arrow-in-down-right::before { content: "\f1bb"; } +.bi-box-arrow-in-down::before { content: "\f1bc"; } +.bi-box-arrow-in-left::before { content: "\f1bd"; } +.bi-box-arrow-in-right::before { content: "\f1be"; } +.bi-box-arrow-in-up-left::before { content: "\f1bf"; } +.bi-box-arrow-in-up-right::before { content: "\f1c0"; } +.bi-box-arrow-in-up::before { content: "\f1c1"; } +.bi-box-arrow-left::before { content: "\f1c2"; } +.bi-box-arrow-right::before { content: "\f1c3"; } +.bi-box-arrow-up-left::before { content: "\f1c4"; } +.bi-box-arrow-up-right::before { content: "\f1c5"; } +.bi-box-arrow-up::before { content: "\f1c6"; } +.bi-box-seam::before { content: "\f1c7"; } +.bi-box::before { content: "\f1c8"; } +.bi-braces::before { content: "\f1c9"; } +.bi-bricks::before { content: "\f1ca"; } +.bi-briefcase-fill::before { content: "\f1cb"; } +.bi-briefcase::before { content: "\f1cc"; } +.bi-brightness-alt-high-fill::before { content: "\f1cd"; } +.bi-brightness-alt-high::before { content: "\f1ce"; } +.bi-brightness-alt-low-fill::before { content: "\f1cf"; } +.bi-brightness-alt-low::before { content: "\f1d0"; } +.bi-brightness-high-fill::before { content: "\f1d1"; } +.bi-brightness-high::before { content: "\f1d2"; } +.bi-brightness-low-fill::before { content: "\f1d3"; } +.bi-brightness-low::before { content: "\f1d4"; } +.bi-broadcast-pin::before { content: "\f1d5"; } +.bi-broadcast::before { content: "\f1d6"; } +.bi-brush-fill::before { content: "\f1d7"; } +.bi-brush::before { content: "\f1d8"; } +.bi-bucket-fill::before { content: "\f1d9"; } +.bi-bucket::before { content: "\f1da"; } +.bi-bug-fill::before { content: "\f1db"; } +.bi-bug::before { content: "\f1dc"; } +.bi-building::before { content: "\f1dd"; } +.bi-bullseye::before { content: "\f1de"; } +.bi-calculator-fill::before { content: "\f1df"; } +.bi-calculator::before { content: "\f1e0"; } +.bi-calendar-check-fill::before { content: 
"\f1e1"; } +.bi-calendar-check::before { content: "\f1e2"; } +.bi-calendar-date-fill::before { content: "\f1e3"; } +.bi-calendar-date::before { content: "\f1e4"; } +.bi-calendar-day-fill::before { content: "\f1e5"; } +.bi-calendar-day::before { content: "\f1e6"; } +.bi-calendar-event-fill::before { content: "\f1e7"; } +.bi-calendar-event::before { content: "\f1e8"; } +.bi-calendar-fill::before { content: "\f1e9"; } +.bi-calendar-minus-fill::before { content: "\f1ea"; } +.bi-calendar-minus::before { content: "\f1eb"; } +.bi-calendar-month-fill::before { content: "\f1ec"; } +.bi-calendar-month::before { content: "\f1ed"; } +.bi-calendar-plus-fill::before { content: "\f1ee"; } +.bi-calendar-plus::before { content: "\f1ef"; } +.bi-calendar-range-fill::before { content: "\f1f0"; } +.bi-calendar-range::before { content: "\f1f1"; } +.bi-calendar-week-fill::before { content: "\f1f2"; } +.bi-calendar-week::before { content: "\f1f3"; } +.bi-calendar-x-fill::before { content: "\f1f4"; } +.bi-calendar-x::before { content: "\f1f5"; } +.bi-calendar::before { content: "\f1f6"; } +.bi-calendar2-check-fill::before { content: "\f1f7"; } +.bi-calendar2-check::before { content: "\f1f8"; } +.bi-calendar2-date-fill::before { content: "\f1f9"; } +.bi-calendar2-date::before { content: "\f1fa"; } +.bi-calendar2-day-fill::before { content: "\f1fb"; } +.bi-calendar2-day::before { content: "\f1fc"; } +.bi-calendar2-event-fill::before { content: "\f1fd"; } +.bi-calendar2-event::before { content: "\f1fe"; } +.bi-calendar2-fill::before { content: "\f1ff"; } +.bi-calendar2-minus-fill::before { content: "\f200"; } +.bi-calendar2-minus::before { content: "\f201"; } +.bi-calendar2-month-fill::before { content: "\f202"; } +.bi-calendar2-month::before { content: "\f203"; } +.bi-calendar2-plus-fill::before { content: "\f204"; } +.bi-calendar2-plus::before { content: "\f205"; } +.bi-calendar2-range-fill::before { content: "\f206"; } +.bi-calendar2-range::before { content: "\f207"; } 
+.bi-calendar2-week-fill::before { content: "\f208"; } +.bi-calendar2-week::before { content: "\f209"; } +.bi-calendar2-x-fill::before { content: "\f20a"; } +.bi-calendar2-x::before { content: "\f20b"; } +.bi-calendar2::before { content: "\f20c"; } +.bi-calendar3-event-fill::before { content: "\f20d"; } +.bi-calendar3-event::before { content: "\f20e"; } +.bi-calendar3-fill::before { content: "\f20f"; } +.bi-calendar3-range-fill::before { content: "\f210"; } +.bi-calendar3-range::before { content: "\f211"; } +.bi-calendar3-week-fill::before { content: "\f212"; } +.bi-calendar3-week::before { content: "\f213"; } +.bi-calendar3::before { content: "\f214"; } +.bi-calendar4-event::before { content: "\f215"; } +.bi-calendar4-range::before { content: "\f216"; } +.bi-calendar4-week::before { content: "\f217"; } +.bi-calendar4::before { content: "\f218"; } +.bi-camera-fill::before { content: "\f219"; } +.bi-camera-reels-fill::before { content: "\f21a"; } +.bi-camera-reels::before { content: "\f21b"; } +.bi-camera-video-fill::before { content: "\f21c"; } +.bi-camera-video-off-fill::before { content: "\f21d"; } +.bi-camera-video-off::before { content: "\f21e"; } +.bi-camera-video::before { content: "\f21f"; } +.bi-camera::before { content: "\f220"; } +.bi-camera2::before { content: "\f221"; } +.bi-capslock-fill::before { content: "\f222"; } +.bi-capslock::before { content: "\f223"; } +.bi-card-checklist::before { content: "\f224"; } +.bi-card-heading::before { content: "\f225"; } +.bi-card-image::before { content: "\f226"; } +.bi-card-list::before { content: "\f227"; } +.bi-card-text::before { content: "\f228"; } +.bi-caret-down-fill::before { content: "\f229"; } +.bi-caret-down-square-fill::before { content: "\f22a"; } +.bi-caret-down-square::before { content: "\f22b"; } +.bi-caret-down::before { content: "\f22c"; } +.bi-caret-left-fill::before { content: "\f22d"; } +.bi-caret-left-square-fill::before { content: "\f22e"; } +.bi-caret-left-square::before { content: "\f22f"; } 
+.bi-caret-left::before { content: "\f230"; } +.bi-caret-right-fill::before { content: "\f231"; } +.bi-caret-right-square-fill::before { content: "\f232"; } +.bi-caret-right-square::before { content: "\f233"; } +.bi-caret-right::before { content: "\f234"; } +.bi-caret-up-fill::before { content: "\f235"; } +.bi-caret-up-square-fill::before { content: "\f236"; } +.bi-caret-up-square::before { content: "\f237"; } +.bi-caret-up::before { content: "\f238"; } +.bi-cart-check-fill::before { content: "\f239"; } +.bi-cart-check::before { content: "\f23a"; } +.bi-cart-dash-fill::before { content: "\f23b"; } +.bi-cart-dash::before { content: "\f23c"; } +.bi-cart-fill::before { content: "\f23d"; } +.bi-cart-plus-fill::before { content: "\f23e"; } +.bi-cart-plus::before { content: "\f23f"; } +.bi-cart-x-fill::before { content: "\f240"; } +.bi-cart-x::before { content: "\f241"; } +.bi-cart::before { content: "\f242"; } +.bi-cart2::before { content: "\f243"; } +.bi-cart3::before { content: "\f244"; } +.bi-cart4::before { content: "\f245"; } +.bi-cash-stack::before { content: "\f246"; } +.bi-cash::before { content: "\f247"; } +.bi-cast::before { content: "\f248"; } +.bi-chat-dots-fill::before { content: "\f249"; } +.bi-chat-dots::before { content: "\f24a"; } +.bi-chat-fill::before { content: "\f24b"; } +.bi-chat-left-dots-fill::before { content: "\f24c"; } +.bi-chat-left-dots::before { content: "\f24d"; } +.bi-chat-left-fill::before { content: "\f24e"; } +.bi-chat-left-quote-fill::before { content: "\f24f"; } +.bi-chat-left-quote::before { content: "\f250"; } +.bi-chat-left-text-fill::before { content: "\f251"; } +.bi-chat-left-text::before { content: "\f252"; } +.bi-chat-left::before { content: "\f253"; } +.bi-chat-quote-fill::before { content: "\f254"; } +.bi-chat-quote::before { content: "\f255"; } +.bi-chat-right-dots-fill::before { content: "\f256"; } +.bi-chat-right-dots::before { content: "\f257"; } +.bi-chat-right-fill::before { content: "\f258"; } 
+.bi-chat-right-quote-fill::before { content: "\f259"; } +.bi-chat-right-quote::before { content: "\f25a"; } +.bi-chat-right-text-fill::before { content: "\f25b"; } +.bi-chat-right-text::before { content: "\f25c"; } +.bi-chat-right::before { content: "\f25d"; } +.bi-chat-square-dots-fill::before { content: "\f25e"; } +.bi-chat-square-dots::before { content: "\f25f"; } +.bi-chat-square-fill::before { content: "\f260"; } +.bi-chat-square-quote-fill::before { content: "\f261"; } +.bi-chat-square-quote::before { content: "\f262"; } +.bi-chat-square-text-fill::before { content: "\f263"; } +.bi-chat-square-text::before { content: "\f264"; } +.bi-chat-square::before { content: "\f265"; } +.bi-chat-text-fill::before { content: "\f266"; } +.bi-chat-text::before { content: "\f267"; } +.bi-chat::before { content: "\f268"; } +.bi-check-all::before { content: "\f269"; } +.bi-check-circle-fill::before { content: "\f26a"; } +.bi-check-circle::before { content: "\f26b"; } +.bi-check-square-fill::before { content: "\f26c"; } +.bi-check-square::before { content: "\f26d"; } +.bi-check::before { content: "\f26e"; } +.bi-check2-all::before { content: "\f26f"; } +.bi-check2-circle::before { content: "\f270"; } +.bi-check2-square::before { content: "\f271"; } +.bi-check2::before { content: "\f272"; } +.bi-chevron-bar-contract::before { content: "\f273"; } +.bi-chevron-bar-down::before { content: "\f274"; } +.bi-chevron-bar-expand::before { content: "\f275"; } +.bi-chevron-bar-left::before { content: "\f276"; } +.bi-chevron-bar-right::before { content: "\f277"; } +.bi-chevron-bar-up::before { content: "\f278"; } +.bi-chevron-compact-down::before { content: "\f279"; } +.bi-chevron-compact-left::before { content: "\f27a"; } +.bi-chevron-compact-right::before { content: "\f27b"; } +.bi-chevron-compact-up::before { content: "\f27c"; } +.bi-chevron-contract::before { content: "\f27d"; } +.bi-chevron-double-down::before { content: "\f27e"; } +.bi-chevron-double-left::before { content: "\f27f"; 
} +.bi-chevron-double-right::before { content: "\f280"; } +.bi-chevron-double-up::before { content: "\f281"; } +.bi-chevron-down::before { content: "\f282"; } +.bi-chevron-expand::before { content: "\f283"; } +.bi-chevron-left::before { content: "\f284"; } +.bi-chevron-right::before { content: "\f285"; } +.bi-chevron-up::before { content: "\f286"; } +.bi-circle-fill::before { content: "\f287"; } +.bi-circle-half::before { content: "\f288"; } +.bi-circle-square::before { content: "\f289"; } +.bi-circle::before { content: "\f28a"; } +.bi-clipboard-check::before { content: "\f28b"; } +.bi-clipboard-data::before { content: "\f28c"; } +.bi-clipboard-minus::before { content: "\f28d"; } +.bi-clipboard-plus::before { content: "\f28e"; } +.bi-clipboard-x::before { content: "\f28f"; } +.bi-clipboard::before { content: "\f290"; } +.bi-clock-fill::before { content: "\f291"; } +.bi-clock-history::before { content: "\f292"; } +.bi-clock::before { content: "\f293"; } +.bi-cloud-arrow-down-fill::before { content: "\f294"; } +.bi-cloud-arrow-down::before { content: "\f295"; } +.bi-cloud-arrow-up-fill::before { content: "\f296"; } +.bi-cloud-arrow-up::before { content: "\f297"; } +.bi-cloud-check-fill::before { content: "\f298"; } +.bi-cloud-check::before { content: "\f299"; } +.bi-cloud-download-fill::before { content: "\f29a"; } +.bi-cloud-download::before { content: "\f29b"; } +.bi-cloud-drizzle-fill::before { content: "\f29c"; } +.bi-cloud-drizzle::before { content: "\f29d"; } +.bi-cloud-fill::before { content: "\f29e"; } +.bi-cloud-fog-fill::before { content: "\f29f"; } +.bi-cloud-fog::before { content: "\f2a0"; } +.bi-cloud-fog2-fill::before { content: "\f2a1"; } +.bi-cloud-fog2::before { content: "\f2a2"; } +.bi-cloud-hail-fill::before { content: "\f2a3"; } +.bi-cloud-hail::before { content: "\f2a4"; } +.bi-cloud-haze-fill::before { content: "\f2a6"; } +.bi-cloud-haze::before { content: "\f2a7"; } +.bi-cloud-haze2-fill::before { content: "\f2a8"; } 
+.bi-cloud-lightning-fill::before { content: "\f2a9"; } +.bi-cloud-lightning-rain-fill::before { content: "\f2aa"; } +.bi-cloud-lightning-rain::before { content: "\f2ab"; } +.bi-cloud-lightning::before { content: "\f2ac"; } +.bi-cloud-minus-fill::before { content: "\f2ad"; } +.bi-cloud-minus::before { content: "\f2ae"; } +.bi-cloud-moon-fill::before { content: "\f2af"; } +.bi-cloud-moon::before { content: "\f2b0"; } +.bi-cloud-plus-fill::before { content: "\f2b1"; } +.bi-cloud-plus::before { content: "\f2b2"; } +.bi-cloud-rain-fill::before { content: "\f2b3"; } +.bi-cloud-rain-heavy-fill::before { content: "\f2b4"; } +.bi-cloud-rain-heavy::before { content: "\f2b5"; } +.bi-cloud-rain::before { content: "\f2b6"; } +.bi-cloud-slash-fill::before { content: "\f2b7"; } +.bi-cloud-slash::before { content: "\f2b8"; } +.bi-cloud-sleet-fill::before { content: "\f2b9"; } +.bi-cloud-sleet::before { content: "\f2ba"; } +.bi-cloud-snow-fill::before { content: "\f2bb"; } +.bi-cloud-snow::before { content: "\f2bc"; } +.bi-cloud-sun-fill::before { content: "\f2bd"; } +.bi-cloud-sun::before { content: "\f2be"; } +.bi-cloud-upload-fill::before { content: "\f2bf"; } +.bi-cloud-upload::before { content: "\f2c0"; } +.bi-cloud::before { content: "\f2c1"; } +.bi-clouds-fill::before { content: "\f2c2"; } +.bi-clouds::before { content: "\f2c3"; } +.bi-cloudy-fill::before { content: "\f2c4"; } +.bi-cloudy::before { content: "\f2c5"; } +.bi-code-slash::before { content: "\f2c6"; } +.bi-code-square::before { content: "\f2c7"; } +.bi-code::before { content: "\f2c8"; } +.bi-collection-fill::before { content: "\f2c9"; } +.bi-collection-play-fill::before { content: "\f2ca"; } +.bi-collection-play::before { content: "\f2cb"; } +.bi-collection::before { content: "\f2cc"; } +.bi-columns-gap::before { content: "\f2cd"; } +.bi-columns::before { content: "\f2ce"; } +.bi-command::before { content: "\f2cf"; } +.bi-compass-fill::before { content: "\f2d0"; } +.bi-compass::before { content: "\f2d1"; } 
+.bi-cone-striped::before { content: "\f2d2"; } +.bi-cone::before { content: "\f2d3"; } +.bi-controller::before { content: "\f2d4"; } +.bi-cpu-fill::before { content: "\f2d5"; } +.bi-cpu::before { content: "\f2d6"; } +.bi-credit-card-2-back-fill::before { content: "\f2d7"; } +.bi-credit-card-2-back::before { content: "\f2d8"; } +.bi-credit-card-2-front-fill::before { content: "\f2d9"; } +.bi-credit-card-2-front::before { content: "\f2da"; } +.bi-credit-card-fill::before { content: "\f2db"; } +.bi-credit-card::before { content: "\f2dc"; } +.bi-crop::before { content: "\f2dd"; } +.bi-cup-fill::before { content: "\f2de"; } +.bi-cup-straw::before { content: "\f2df"; } +.bi-cup::before { content: "\f2e0"; } +.bi-cursor-fill::before { content: "\f2e1"; } +.bi-cursor-text::before { content: "\f2e2"; } +.bi-cursor::before { content: "\f2e3"; } +.bi-dash-circle-dotted::before { content: "\f2e4"; } +.bi-dash-circle-fill::before { content: "\f2e5"; } +.bi-dash-circle::before { content: "\f2e6"; } +.bi-dash-square-dotted::before { content: "\f2e7"; } +.bi-dash-square-fill::before { content: "\f2e8"; } +.bi-dash-square::before { content: "\f2e9"; } +.bi-dash::before { content: "\f2ea"; } +.bi-diagram-2-fill::before { content: "\f2eb"; } +.bi-diagram-2::before { content: "\f2ec"; } +.bi-diagram-3-fill::before { content: "\f2ed"; } +.bi-diagram-3::before { content: "\f2ee"; } +.bi-diamond-fill::before { content: "\f2ef"; } +.bi-diamond-half::before { content: "\f2f0"; } +.bi-diamond::before { content: "\f2f1"; } +.bi-dice-1-fill::before { content: "\f2f2"; } +.bi-dice-1::before { content: "\f2f3"; } +.bi-dice-2-fill::before { content: "\f2f4"; } +.bi-dice-2::before { content: "\f2f5"; } +.bi-dice-3-fill::before { content: "\f2f6"; } +.bi-dice-3::before { content: "\f2f7"; } +.bi-dice-4-fill::before { content: "\f2f8"; } +.bi-dice-4::before { content: "\f2f9"; } +.bi-dice-5-fill::before { content: "\f2fa"; } +.bi-dice-5::before { content: "\f2fb"; } +.bi-dice-6-fill::before { 
content: "\f2fc"; } +.bi-dice-6::before { content: "\f2fd"; } +.bi-disc-fill::before { content: "\f2fe"; } +.bi-disc::before { content: "\f2ff"; } +.bi-discord::before { content: "\f300"; } +.bi-display-fill::before { content: "\f301"; } +.bi-display::before { content: "\f302"; } +.bi-distribute-horizontal::before { content: "\f303"; } +.bi-distribute-vertical::before { content: "\f304"; } +.bi-door-closed-fill::before { content: "\f305"; } +.bi-door-closed::before { content: "\f306"; } +.bi-door-open-fill::before { content: "\f307"; } +.bi-door-open::before { content: "\f308"; } +.bi-dot::before { content: "\f309"; } +.bi-download::before { content: "\f30a"; } +.bi-droplet-fill::before { content: "\f30b"; } +.bi-droplet-half::before { content: "\f30c"; } +.bi-droplet::before { content: "\f30d"; } +.bi-earbuds::before { content: "\f30e"; } +.bi-easel-fill::before { content: "\f30f"; } +.bi-easel::before { content: "\f310"; } +.bi-egg-fill::before { content: "\f311"; } +.bi-egg-fried::before { content: "\f312"; } +.bi-egg::before { content: "\f313"; } +.bi-eject-fill::before { content: "\f314"; } +.bi-eject::before { content: "\f315"; } +.bi-emoji-angry-fill::before { content: "\f316"; } +.bi-emoji-angry::before { content: "\f317"; } +.bi-emoji-dizzy-fill::before { content: "\f318"; } +.bi-emoji-dizzy::before { content: "\f319"; } +.bi-emoji-expressionless-fill::before { content: "\f31a"; } +.bi-emoji-expressionless::before { content: "\f31b"; } +.bi-emoji-frown-fill::before { content: "\f31c"; } +.bi-emoji-frown::before { content: "\f31d"; } +.bi-emoji-heart-eyes-fill::before { content: "\f31e"; } +.bi-emoji-heart-eyes::before { content: "\f31f"; } +.bi-emoji-laughing-fill::before { content: "\f320"; } +.bi-emoji-laughing::before { content: "\f321"; } +.bi-emoji-neutral-fill::before { content: "\f322"; } +.bi-emoji-neutral::before { content: "\f323"; } +.bi-emoji-smile-fill::before { content: "\f324"; } +.bi-emoji-smile-upside-down-fill::before { content: "\f325"; 
} +.bi-emoji-smile-upside-down::before { content: "\f326"; } +.bi-emoji-smile::before { content: "\f327"; } +.bi-emoji-sunglasses-fill::before { content: "\f328"; } +.bi-emoji-sunglasses::before { content: "\f329"; } +.bi-emoji-wink-fill::before { content: "\f32a"; } +.bi-emoji-wink::before { content: "\f32b"; } +.bi-envelope-fill::before { content: "\f32c"; } +.bi-envelope-open-fill::before { content: "\f32d"; } +.bi-envelope-open::before { content: "\f32e"; } +.bi-envelope::before { content: "\f32f"; } +.bi-eraser-fill::before { content: "\f330"; } +.bi-eraser::before { content: "\f331"; } +.bi-exclamation-circle-fill::before { content: "\f332"; } +.bi-exclamation-circle::before { content: "\f333"; } +.bi-exclamation-diamond-fill::before { content: "\f334"; } +.bi-exclamation-diamond::before { content: "\f335"; } +.bi-exclamation-octagon-fill::before { content: "\f336"; } +.bi-exclamation-octagon::before { content: "\f337"; } +.bi-exclamation-square-fill::before { content: "\f338"; } +.bi-exclamation-square::before { content: "\f339"; } +.bi-exclamation-triangle-fill::before { content: "\f33a"; } +.bi-exclamation-triangle::before { content: "\f33b"; } +.bi-exclamation::before { content: "\f33c"; } +.bi-exclude::before { content: "\f33d"; } +.bi-eye-fill::before { content: "\f33e"; } +.bi-eye-slash-fill::before { content: "\f33f"; } +.bi-eye-slash::before { content: "\f340"; } +.bi-eye::before { content: "\f341"; } +.bi-eyedropper::before { content: "\f342"; } +.bi-eyeglasses::before { content: "\f343"; } +.bi-facebook::before { content: "\f344"; } +.bi-file-arrow-down-fill::before { content: "\f345"; } +.bi-file-arrow-down::before { content: "\f346"; } +.bi-file-arrow-up-fill::before { content: "\f347"; } +.bi-file-arrow-up::before { content: "\f348"; } +.bi-file-bar-graph-fill::before { content: "\f349"; } +.bi-file-bar-graph::before { content: "\f34a"; } +.bi-file-binary-fill::before { content: "\f34b"; } +.bi-file-binary::before { content: "\f34c"; } 
+.bi-file-break-fill::before { content: "\f34d"; } +.bi-file-break::before { content: "\f34e"; } +.bi-file-check-fill::before { content: "\f34f"; } +.bi-file-check::before { content: "\f350"; } +.bi-file-code-fill::before { content: "\f351"; } +.bi-file-code::before { content: "\f352"; } +.bi-file-diff-fill::before { content: "\f353"; } +.bi-file-diff::before { content: "\f354"; } +.bi-file-earmark-arrow-down-fill::before { content: "\f355"; } +.bi-file-earmark-arrow-down::before { content: "\f356"; } +.bi-file-earmark-arrow-up-fill::before { content: "\f357"; } +.bi-file-earmark-arrow-up::before { content: "\f358"; } +.bi-file-earmark-bar-graph-fill::before { content: "\f359"; } +.bi-file-earmark-bar-graph::before { content: "\f35a"; } +.bi-file-earmark-binary-fill::before { content: "\f35b"; } +.bi-file-earmark-binary::before { content: "\f35c"; } +.bi-file-earmark-break-fill::before { content: "\f35d"; } +.bi-file-earmark-break::before { content: "\f35e"; } +.bi-file-earmark-check-fill::before { content: "\f35f"; } +.bi-file-earmark-check::before { content: "\f360"; } +.bi-file-earmark-code-fill::before { content: "\f361"; } +.bi-file-earmark-code::before { content: "\f362"; } +.bi-file-earmark-diff-fill::before { content: "\f363"; } +.bi-file-earmark-diff::before { content: "\f364"; } +.bi-file-earmark-easel-fill::before { content: "\f365"; } +.bi-file-earmark-easel::before { content: "\f366"; } +.bi-file-earmark-excel-fill::before { content: "\f367"; } +.bi-file-earmark-excel::before { content: "\f368"; } +.bi-file-earmark-fill::before { content: "\f369"; } +.bi-file-earmark-font-fill::before { content: "\f36a"; } +.bi-file-earmark-font::before { content: "\f36b"; } +.bi-file-earmark-image-fill::before { content: "\f36c"; } +.bi-file-earmark-image::before { content: "\f36d"; } +.bi-file-earmark-lock-fill::before { content: "\f36e"; } +.bi-file-earmark-lock::before { content: "\f36f"; } +.bi-file-earmark-lock2-fill::before { content: "\f370"; } 
+.bi-file-earmark-lock2::before { content: "\f371"; } +.bi-file-earmark-medical-fill::before { content: "\f372"; } +.bi-file-earmark-medical::before { content: "\f373"; } +.bi-file-earmark-minus-fill::before { content: "\f374"; } +.bi-file-earmark-minus::before { content: "\f375"; } +.bi-file-earmark-music-fill::before { content: "\f376"; } +.bi-file-earmark-music::before { content: "\f377"; } +.bi-file-earmark-person-fill::before { content: "\f378"; } +.bi-file-earmark-person::before { content: "\f379"; } +.bi-file-earmark-play-fill::before { content: "\f37a"; } +.bi-file-earmark-play::before { content: "\f37b"; } +.bi-file-earmark-plus-fill::before { content: "\f37c"; } +.bi-file-earmark-plus::before { content: "\f37d"; } +.bi-file-earmark-post-fill::before { content: "\f37e"; } +.bi-file-earmark-post::before { content: "\f37f"; } +.bi-file-earmark-ppt-fill::before { content: "\f380"; } +.bi-file-earmark-ppt::before { content: "\f381"; } +.bi-file-earmark-richtext-fill::before { content: "\f382"; } +.bi-file-earmark-richtext::before { content: "\f383"; } +.bi-file-earmark-ruled-fill::before { content: "\f384"; } +.bi-file-earmark-ruled::before { content: "\f385"; } +.bi-file-earmark-slides-fill::before { content: "\f386"; } +.bi-file-earmark-slides::before { content: "\f387"; } +.bi-file-earmark-spreadsheet-fill::before { content: "\f388"; } +.bi-file-earmark-spreadsheet::before { content: "\f389"; } +.bi-file-earmark-text-fill::before { content: "\f38a"; } +.bi-file-earmark-text::before { content: "\f38b"; } +.bi-file-earmark-word-fill::before { content: "\f38c"; } +.bi-file-earmark-word::before { content: "\f38d"; } +.bi-file-earmark-x-fill::before { content: "\f38e"; } +.bi-file-earmark-x::before { content: "\f38f"; } +.bi-file-earmark-zip-fill::before { content: "\f390"; } +.bi-file-earmark-zip::before { content: "\f391"; } +.bi-file-earmark::before { content: "\f392"; } +.bi-file-easel-fill::before { content: "\f393"; } +.bi-file-easel::before { content: 
"\f394"; } +.bi-file-excel-fill::before { content: "\f395"; } +.bi-file-excel::before { content: "\f396"; } +.bi-file-fill::before { content: "\f397"; } +.bi-file-font-fill::before { content: "\f398"; } +.bi-file-font::before { content: "\f399"; } +.bi-file-image-fill::before { content: "\f39a"; } +.bi-file-image::before { content: "\f39b"; } +.bi-file-lock-fill::before { content: "\f39c"; } +.bi-file-lock::before { content: "\f39d"; } +.bi-file-lock2-fill::before { content: "\f39e"; } +.bi-file-lock2::before { content: "\f39f"; } +.bi-file-medical-fill::before { content: "\f3a0"; } +.bi-file-medical::before { content: "\f3a1"; } +.bi-file-minus-fill::before { content: "\f3a2"; } +.bi-file-minus::before { content: "\f3a3"; } +.bi-file-music-fill::before { content: "\f3a4"; } +.bi-file-music::before { content: "\f3a5"; } +.bi-file-person-fill::before { content: "\f3a6"; } +.bi-file-person::before { content: "\f3a7"; } +.bi-file-play-fill::before { content: "\f3a8"; } +.bi-file-play::before { content: "\f3a9"; } +.bi-file-plus-fill::before { content: "\f3aa"; } +.bi-file-plus::before { content: "\f3ab"; } +.bi-file-post-fill::before { content: "\f3ac"; } +.bi-file-post::before { content: "\f3ad"; } +.bi-file-ppt-fill::before { content: "\f3ae"; } +.bi-file-ppt::before { content: "\f3af"; } +.bi-file-richtext-fill::before { content: "\f3b0"; } +.bi-file-richtext::before { content: "\f3b1"; } +.bi-file-ruled-fill::before { content: "\f3b2"; } +.bi-file-ruled::before { content: "\f3b3"; } +.bi-file-slides-fill::before { content: "\f3b4"; } +.bi-file-slides::before { content: "\f3b5"; } +.bi-file-spreadsheet-fill::before { content: "\f3b6"; } +.bi-file-spreadsheet::before { content: "\f3b7"; } +.bi-file-text-fill::before { content: "\f3b8"; } +.bi-file-text::before { content: "\f3b9"; } +.bi-file-word-fill::before { content: "\f3ba"; } +.bi-file-word::before { content: "\f3bb"; } +.bi-file-x-fill::before { content: "\f3bc"; } +.bi-file-x::before { content: "\f3bd"; } 
+.bi-file-zip-fill::before { content: "\f3be"; } +.bi-file-zip::before { content: "\f3bf"; } +.bi-file::before { content: "\f3c0"; } +.bi-files-alt::before { content: "\f3c1"; } +.bi-files::before { content: "\f3c2"; } +.bi-film::before { content: "\f3c3"; } +.bi-filter-circle-fill::before { content: "\f3c4"; } +.bi-filter-circle::before { content: "\f3c5"; } +.bi-filter-left::before { content: "\f3c6"; } +.bi-filter-right::before { content: "\f3c7"; } +.bi-filter-square-fill::before { content: "\f3c8"; } +.bi-filter-square::before { content: "\f3c9"; } +.bi-filter::before { content: "\f3ca"; } +.bi-flag-fill::before { content: "\f3cb"; } +.bi-flag::before { content: "\f3cc"; } +.bi-flower1::before { content: "\f3cd"; } +.bi-flower2::before { content: "\f3ce"; } +.bi-flower3::before { content: "\f3cf"; } +.bi-folder-check::before { content: "\f3d0"; } +.bi-folder-fill::before { content: "\f3d1"; } +.bi-folder-minus::before { content: "\f3d2"; } +.bi-folder-plus::before { content: "\f3d3"; } +.bi-folder-symlink-fill::before { content: "\f3d4"; } +.bi-folder-symlink::before { content: "\f3d5"; } +.bi-folder-x::before { content: "\f3d6"; } +.bi-folder::before { content: "\f3d7"; } +.bi-folder2-open::before { content: "\f3d8"; } +.bi-folder2::before { content: "\f3d9"; } +.bi-fonts::before { content: "\f3da"; } +.bi-forward-fill::before { content: "\f3db"; } +.bi-forward::before { content: "\f3dc"; } +.bi-front::before { content: "\f3dd"; } +.bi-fullscreen-exit::before { content: "\f3de"; } +.bi-fullscreen::before { content: "\f3df"; } +.bi-funnel-fill::before { content: "\f3e0"; } +.bi-funnel::before { content: "\f3e1"; } +.bi-gear-fill::before { content: "\f3e2"; } +.bi-gear-wide-connected::before { content: "\f3e3"; } +.bi-gear-wide::before { content: "\f3e4"; } +.bi-gear::before { content: "\f3e5"; } +.bi-gem::before { content: "\f3e6"; } +.bi-geo-alt-fill::before { content: "\f3e7"; } +.bi-geo-alt::before { content: "\f3e8"; } +.bi-geo-fill::before { content: 
"\f3e9"; } +.bi-geo::before { content: "\f3ea"; } +.bi-gift-fill::before { content: "\f3eb"; } +.bi-gift::before { content: "\f3ec"; } +.bi-github::before { content: "\f3ed"; } +.bi-globe::before { content: "\f3ee"; } +.bi-globe2::before { content: "\f3ef"; } +.bi-google::before { content: "\f3f0"; } +.bi-graph-down::before { content: "\f3f1"; } +.bi-graph-up::before { content: "\f3f2"; } +.bi-grid-1x2-fill::before { content: "\f3f3"; } +.bi-grid-1x2::before { content: "\f3f4"; } +.bi-grid-3x2-gap-fill::before { content: "\f3f5"; } +.bi-grid-3x2-gap::before { content: "\f3f6"; } +.bi-grid-3x2::before { content: "\f3f7"; } +.bi-grid-3x3-gap-fill::before { content: "\f3f8"; } +.bi-grid-3x3-gap::before { content: "\f3f9"; } +.bi-grid-3x3::before { content: "\f3fa"; } +.bi-grid-fill::before { content: "\f3fb"; } +.bi-grid::before { content: "\f3fc"; } +.bi-grip-horizontal::before { content: "\f3fd"; } +.bi-grip-vertical::before { content: "\f3fe"; } +.bi-hammer::before { content: "\f3ff"; } +.bi-hand-index-fill::before { content: "\f400"; } +.bi-hand-index-thumb-fill::before { content: "\f401"; } +.bi-hand-index-thumb::before { content: "\f402"; } +.bi-hand-index::before { content: "\f403"; } +.bi-hand-thumbs-down-fill::before { content: "\f404"; } +.bi-hand-thumbs-down::before { content: "\f405"; } +.bi-hand-thumbs-up-fill::before { content: "\f406"; } +.bi-hand-thumbs-up::before { content: "\f407"; } +.bi-handbag-fill::before { content: "\f408"; } +.bi-handbag::before { content: "\f409"; } +.bi-hash::before { content: "\f40a"; } +.bi-hdd-fill::before { content: "\f40b"; } +.bi-hdd-network-fill::before { content: "\f40c"; } +.bi-hdd-network::before { content: "\f40d"; } +.bi-hdd-rack-fill::before { content: "\f40e"; } +.bi-hdd-rack::before { content: "\f40f"; } +.bi-hdd-stack-fill::before { content: "\f410"; } +.bi-hdd-stack::before { content: "\f411"; } +.bi-hdd::before { content: "\f412"; } +.bi-headphones::before { content: "\f413"; } +.bi-headset::before { 
content: "\f414"; } +.bi-heart-fill::before { content: "\f415"; } +.bi-heart-half::before { content: "\f416"; } +.bi-heart::before { content: "\f417"; } +.bi-heptagon-fill::before { content: "\f418"; } +.bi-heptagon-half::before { content: "\f419"; } +.bi-heptagon::before { content: "\f41a"; } +.bi-hexagon-fill::before { content: "\f41b"; } +.bi-hexagon-half::before { content: "\f41c"; } +.bi-hexagon::before { content: "\f41d"; } +.bi-hourglass-bottom::before { content: "\f41e"; } +.bi-hourglass-split::before { content: "\f41f"; } +.bi-hourglass-top::before { content: "\f420"; } +.bi-hourglass::before { content: "\f421"; } +.bi-house-door-fill::before { content: "\f422"; } +.bi-house-door::before { content: "\f423"; } +.bi-house-fill::before { content: "\f424"; } +.bi-house::before { content: "\f425"; } +.bi-hr::before { content: "\f426"; } +.bi-hurricane::before { content: "\f427"; } +.bi-image-alt::before { content: "\f428"; } +.bi-image-fill::before { content: "\f429"; } +.bi-image::before { content: "\f42a"; } +.bi-images::before { content: "\f42b"; } +.bi-inbox-fill::before { content: "\f42c"; } +.bi-inbox::before { content: "\f42d"; } +.bi-inboxes-fill::before { content: "\f42e"; } +.bi-inboxes::before { content: "\f42f"; } +.bi-info-circle-fill::before { content: "\f430"; } +.bi-info-circle::before { content: "\f431"; } +.bi-info-square-fill::before { content: "\f432"; } +.bi-info-square::before { content: "\f433"; } +.bi-info::before { content: "\f434"; } +.bi-input-cursor-text::before { content: "\f435"; } +.bi-input-cursor::before { content: "\f436"; } +.bi-instagram::before { content: "\f437"; } +.bi-intersect::before { content: "\f438"; } +.bi-journal-album::before { content: "\f439"; } +.bi-journal-arrow-down::before { content: "\f43a"; } +.bi-journal-arrow-up::before { content: "\f43b"; } +.bi-journal-bookmark-fill::before { content: "\f43c"; } +.bi-journal-bookmark::before { content: "\f43d"; } +.bi-journal-check::before { content: "\f43e"; } 
+.bi-journal-code::before { content: "\f43f"; } +.bi-journal-medical::before { content: "\f440"; } +.bi-journal-minus::before { content: "\f441"; } +.bi-journal-plus::before { content: "\f442"; } +.bi-journal-richtext::before { content: "\f443"; } +.bi-journal-text::before { content: "\f444"; } +.bi-journal-x::before { content: "\f445"; } +.bi-journal::before { content: "\f446"; } +.bi-journals::before { content: "\f447"; } +.bi-joystick::before { content: "\f448"; } +.bi-justify-left::before { content: "\f449"; } +.bi-justify-right::before { content: "\f44a"; } +.bi-justify::before { content: "\f44b"; } +.bi-kanban-fill::before { content: "\f44c"; } +.bi-kanban::before { content: "\f44d"; } +.bi-key-fill::before { content: "\f44e"; } +.bi-key::before { content: "\f44f"; } +.bi-keyboard-fill::before { content: "\f450"; } +.bi-keyboard::before { content: "\f451"; } +.bi-ladder::before { content: "\f452"; } +.bi-lamp-fill::before { content: "\f453"; } +.bi-lamp::before { content: "\f454"; } +.bi-laptop-fill::before { content: "\f455"; } +.bi-laptop::before { content: "\f456"; } +.bi-layer-backward::before { content: "\f457"; } +.bi-layer-forward::before { content: "\f458"; } +.bi-layers-fill::before { content: "\f459"; } +.bi-layers-half::before { content: "\f45a"; } +.bi-layers::before { content: "\f45b"; } +.bi-layout-sidebar-inset-reverse::before { content: "\f45c"; } +.bi-layout-sidebar-inset::before { content: "\f45d"; } +.bi-layout-sidebar-reverse::before { content: "\f45e"; } +.bi-layout-sidebar::before { content: "\f45f"; } +.bi-layout-split::before { content: "\f460"; } +.bi-layout-text-sidebar-reverse::before { content: "\f461"; } +.bi-layout-text-sidebar::before { content: "\f462"; } +.bi-layout-text-window-reverse::before { content: "\f463"; } +.bi-layout-text-window::before { content: "\f464"; } +.bi-layout-three-columns::before { content: "\f465"; } +.bi-layout-wtf::before { content: "\f466"; } +.bi-life-preserver::before { content: "\f467"; } 
+.bi-lightbulb-fill::before { content: "\f468"; } +.bi-lightbulb-off-fill::before { content: "\f469"; } +.bi-lightbulb-off::before { content: "\f46a"; } +.bi-lightbulb::before { content: "\f46b"; } +.bi-lightning-charge-fill::before { content: "\f46c"; } +.bi-lightning-charge::before { content: "\f46d"; } +.bi-lightning-fill::before { content: "\f46e"; } +.bi-lightning::before { content: "\f46f"; } +.bi-link-45deg::before { content: "\f470"; } +.bi-link::before { content: "\f471"; } +.bi-linkedin::before { content: "\f472"; } +.bi-list-check::before { content: "\f473"; } +.bi-list-nested::before { content: "\f474"; } +.bi-list-ol::before { content: "\f475"; } +.bi-list-stars::before { content: "\f476"; } +.bi-list-task::before { content: "\f477"; } +.bi-list-ul::before { content: "\f478"; } +.bi-list::before { content: "\f479"; } +.bi-lock-fill::before { content: "\f47a"; } +.bi-lock::before { content: "\f47b"; } +.bi-mailbox::before { content: "\f47c"; } +.bi-mailbox2::before { content: "\f47d"; } +.bi-map-fill::before { content: "\f47e"; } +.bi-map::before { content: "\f47f"; } +.bi-markdown-fill::before { content: "\f480"; } +.bi-markdown::before { content: "\f481"; } +.bi-mask::before { content: "\f482"; } +.bi-megaphone-fill::before { content: "\f483"; } +.bi-megaphone::before { content: "\f484"; } +.bi-menu-app-fill::before { content: "\f485"; } +.bi-menu-app::before { content: "\f486"; } +.bi-menu-button-fill::before { content: "\f487"; } +.bi-menu-button-wide-fill::before { content: "\f488"; } +.bi-menu-button-wide::before { content: "\f489"; } +.bi-menu-button::before { content: "\f48a"; } +.bi-menu-down::before { content: "\f48b"; } +.bi-menu-up::before { content: "\f48c"; } +.bi-mic-fill::before { content: "\f48d"; } +.bi-mic-mute-fill::before { content: "\f48e"; } +.bi-mic-mute::before { content: "\f48f"; } +.bi-mic::before { content: "\f490"; } +.bi-minecart-loaded::before { content: "\f491"; } +.bi-minecart::before { content: "\f492"; } 
+.bi-moisture::before { content: "\f493"; } +.bi-moon-fill::before { content: "\f494"; } +.bi-moon-stars-fill::before { content: "\f495"; } +.bi-moon-stars::before { content: "\f496"; } +.bi-moon::before { content: "\f497"; } +.bi-mouse-fill::before { content: "\f498"; } +.bi-mouse::before { content: "\f499"; } +.bi-mouse2-fill::before { content: "\f49a"; } +.bi-mouse2::before { content: "\f49b"; } +.bi-mouse3-fill::before { content: "\f49c"; } +.bi-mouse3::before { content: "\f49d"; } +.bi-music-note-beamed::before { content: "\f49e"; } +.bi-music-note-list::before { content: "\f49f"; } +.bi-music-note::before { content: "\f4a0"; } +.bi-music-player-fill::before { content: "\f4a1"; } +.bi-music-player::before { content: "\f4a2"; } +.bi-newspaper::before { content: "\f4a3"; } +.bi-node-minus-fill::before { content: "\f4a4"; } +.bi-node-minus::before { content: "\f4a5"; } +.bi-node-plus-fill::before { content: "\f4a6"; } +.bi-node-plus::before { content: "\f4a7"; } +.bi-nut-fill::before { content: "\f4a8"; } +.bi-nut::before { content: "\f4a9"; } +.bi-octagon-fill::before { content: "\f4aa"; } +.bi-octagon-half::before { content: "\f4ab"; } +.bi-octagon::before { content: "\f4ac"; } +.bi-option::before { content: "\f4ad"; } +.bi-outlet::before { content: "\f4ae"; } +.bi-paint-bucket::before { content: "\f4af"; } +.bi-palette-fill::before { content: "\f4b0"; } +.bi-palette::before { content: "\f4b1"; } +.bi-palette2::before { content: "\f4b2"; } +.bi-paperclip::before { content: "\f4b3"; } +.bi-paragraph::before { content: "\f4b4"; } +.bi-patch-check-fill::before { content: "\f4b5"; } +.bi-patch-check::before { content: "\f4b6"; } +.bi-patch-exclamation-fill::before { content: "\f4b7"; } +.bi-patch-exclamation::before { content: "\f4b8"; } +.bi-patch-minus-fill::before { content: "\f4b9"; } +.bi-patch-minus::before { content: "\f4ba"; } +.bi-patch-plus-fill::before { content: "\f4bb"; } +.bi-patch-plus::before { content: "\f4bc"; } +.bi-patch-question-fill::before { 
content: "\f4bd"; } +.bi-patch-question::before { content: "\f4be"; } +.bi-pause-btn-fill::before { content: "\f4bf"; } +.bi-pause-btn::before { content: "\f4c0"; } +.bi-pause-circle-fill::before { content: "\f4c1"; } +.bi-pause-circle::before { content: "\f4c2"; } +.bi-pause-fill::before { content: "\f4c3"; } +.bi-pause::before { content: "\f4c4"; } +.bi-peace-fill::before { content: "\f4c5"; } +.bi-peace::before { content: "\f4c6"; } +.bi-pen-fill::before { content: "\f4c7"; } +.bi-pen::before { content: "\f4c8"; } +.bi-pencil-fill::before { content: "\f4c9"; } +.bi-pencil-square::before { content: "\f4ca"; } +.bi-pencil::before { content: "\f4cb"; } +.bi-pentagon-fill::before { content: "\f4cc"; } +.bi-pentagon-half::before { content: "\f4cd"; } +.bi-pentagon::before { content: "\f4ce"; } +.bi-people-fill::before { content: "\f4cf"; } +.bi-people::before { content: "\f4d0"; } +.bi-percent::before { content: "\f4d1"; } +.bi-person-badge-fill::before { content: "\f4d2"; } +.bi-person-badge::before { content: "\f4d3"; } +.bi-person-bounding-box::before { content: "\f4d4"; } +.bi-person-check-fill::before { content: "\f4d5"; } +.bi-person-check::before { content: "\f4d6"; } +.bi-person-circle::before { content: "\f4d7"; } +.bi-person-dash-fill::before { content: "\f4d8"; } +.bi-person-dash::before { content: "\f4d9"; } +.bi-person-fill::before { content: "\f4da"; } +.bi-person-lines-fill::before { content: "\f4db"; } +.bi-person-plus-fill::before { content: "\f4dc"; } +.bi-person-plus::before { content: "\f4dd"; } +.bi-person-square::before { content: "\f4de"; } +.bi-person-x-fill::before { content: "\f4df"; } +.bi-person-x::before { content: "\f4e0"; } +.bi-person::before { content: "\f4e1"; } +.bi-phone-fill::before { content: "\f4e2"; } +.bi-phone-landscape-fill::before { content: "\f4e3"; } +.bi-phone-landscape::before { content: "\f4e4"; } +.bi-phone-vibrate-fill::before { content: "\f4e5"; } +.bi-phone-vibrate::before { content: "\f4e6"; } +.bi-phone::before { 
content: "\f4e7"; } +.bi-pie-chart-fill::before { content: "\f4e8"; } +.bi-pie-chart::before { content: "\f4e9"; } +.bi-pin-angle-fill::before { content: "\f4ea"; } +.bi-pin-angle::before { content: "\f4eb"; } +.bi-pin-fill::before { content: "\f4ec"; } +.bi-pin::before { content: "\f4ed"; } +.bi-pip-fill::before { content: "\f4ee"; } +.bi-pip::before { content: "\f4ef"; } +.bi-play-btn-fill::before { content: "\f4f0"; } +.bi-play-btn::before { content: "\f4f1"; } +.bi-play-circle-fill::before { content: "\f4f2"; } +.bi-play-circle::before { content: "\f4f3"; } +.bi-play-fill::before { content: "\f4f4"; } +.bi-play::before { content: "\f4f5"; } +.bi-plug-fill::before { content: "\f4f6"; } +.bi-plug::before { content: "\f4f7"; } +.bi-plus-circle-dotted::before { content: "\f4f8"; } +.bi-plus-circle-fill::before { content: "\f4f9"; } +.bi-plus-circle::before { content: "\f4fa"; } +.bi-plus-square-dotted::before { content: "\f4fb"; } +.bi-plus-square-fill::before { content: "\f4fc"; } +.bi-plus-square::before { content: "\f4fd"; } +.bi-plus::before { content: "\f4fe"; } +.bi-power::before { content: "\f4ff"; } +.bi-printer-fill::before { content: "\f500"; } +.bi-printer::before { content: "\f501"; } +.bi-puzzle-fill::before { content: "\f502"; } +.bi-puzzle::before { content: "\f503"; } +.bi-question-circle-fill::before { content: "\f504"; } +.bi-question-circle::before { content: "\f505"; } +.bi-question-diamond-fill::before { content: "\f506"; } +.bi-question-diamond::before { content: "\f507"; } +.bi-question-octagon-fill::before { content: "\f508"; } +.bi-question-octagon::before { content: "\f509"; } +.bi-question-square-fill::before { content: "\f50a"; } +.bi-question-square::before { content: "\f50b"; } +.bi-question::before { content: "\f50c"; } +.bi-rainbow::before { content: "\f50d"; } +.bi-receipt-cutoff::before { content: "\f50e"; } +.bi-receipt::before { content: "\f50f"; } +.bi-reception-0::before { content: "\f510"; } +.bi-reception-1::before { content: 
"\f511"; } +.bi-reception-2::before { content: "\f512"; } +.bi-reception-3::before { content: "\f513"; } +.bi-reception-4::before { content: "\f514"; } +.bi-record-btn-fill::before { content: "\f515"; } +.bi-record-btn::before { content: "\f516"; } +.bi-record-circle-fill::before { content: "\f517"; } +.bi-record-circle::before { content: "\f518"; } +.bi-record-fill::before { content: "\f519"; } +.bi-record::before { content: "\f51a"; } +.bi-record2-fill::before { content: "\f51b"; } +.bi-record2::before { content: "\f51c"; } +.bi-reply-all-fill::before { content: "\f51d"; } +.bi-reply-all::before { content: "\f51e"; } +.bi-reply-fill::before { content: "\f51f"; } +.bi-reply::before { content: "\f520"; } +.bi-rss-fill::before { content: "\f521"; } +.bi-rss::before { content: "\f522"; } +.bi-rulers::before { content: "\f523"; } +.bi-save-fill::before { content: "\f524"; } +.bi-save::before { content: "\f525"; } +.bi-save2-fill::before { content: "\f526"; } +.bi-save2::before { content: "\f527"; } +.bi-scissors::before { content: "\f528"; } +.bi-screwdriver::before { content: "\f529"; } +.bi-search::before { content: "\f52a"; } +.bi-segmented-nav::before { content: "\f52b"; } +.bi-server::before { content: "\f52c"; } +.bi-share-fill::before { content: "\f52d"; } +.bi-share::before { content: "\f52e"; } +.bi-shield-check::before { content: "\f52f"; } +.bi-shield-exclamation::before { content: "\f530"; } +.bi-shield-fill-check::before { content: "\f531"; } +.bi-shield-fill-exclamation::before { content: "\f532"; } +.bi-shield-fill-minus::before { content: "\f533"; } +.bi-shield-fill-plus::before { content: "\f534"; } +.bi-shield-fill-x::before { content: "\f535"; } +.bi-shield-fill::before { content: "\f536"; } +.bi-shield-lock-fill::before { content: "\f537"; } +.bi-shield-lock::before { content: "\f538"; } +.bi-shield-minus::before { content: "\f539"; } +.bi-shield-plus::before { content: "\f53a"; } +.bi-shield-shaded::before { content: "\f53b"; } 
+.bi-shield-slash-fill::before { content: "\f53c"; } +.bi-shield-slash::before { content: "\f53d"; } +.bi-shield-x::before { content: "\f53e"; } +.bi-shield::before { content: "\f53f"; } +.bi-shift-fill::before { content: "\f540"; } +.bi-shift::before { content: "\f541"; } +.bi-shop-window::before { content: "\f542"; } +.bi-shop::before { content: "\f543"; } +.bi-shuffle::before { content: "\f544"; } +.bi-signpost-2-fill::before { content: "\f545"; } +.bi-signpost-2::before { content: "\f546"; } +.bi-signpost-fill::before { content: "\f547"; } +.bi-signpost-split-fill::before { content: "\f548"; } +.bi-signpost-split::before { content: "\f549"; } +.bi-signpost::before { content: "\f54a"; } +.bi-sim-fill::before { content: "\f54b"; } +.bi-sim::before { content: "\f54c"; } +.bi-skip-backward-btn-fill::before { content: "\f54d"; } +.bi-skip-backward-btn::before { content: "\f54e"; } +.bi-skip-backward-circle-fill::before { content: "\f54f"; } +.bi-skip-backward-circle::before { content: "\f550"; } +.bi-skip-backward-fill::before { content: "\f551"; } +.bi-skip-backward::before { content: "\f552"; } +.bi-skip-end-btn-fill::before { content: "\f553"; } +.bi-skip-end-btn::before { content: "\f554"; } +.bi-skip-end-circle-fill::before { content: "\f555"; } +.bi-skip-end-circle::before { content: "\f556"; } +.bi-skip-end-fill::before { content: "\f557"; } +.bi-skip-end::before { content: "\f558"; } +.bi-skip-forward-btn-fill::before { content: "\f559"; } +.bi-skip-forward-btn::before { content: "\f55a"; } +.bi-skip-forward-circle-fill::before { content: "\f55b"; } +.bi-skip-forward-circle::before { content: "\f55c"; } +.bi-skip-forward-fill::before { content: "\f55d"; } +.bi-skip-forward::before { content: "\f55e"; } +.bi-skip-start-btn-fill::before { content: "\f55f"; } +.bi-skip-start-btn::before { content: "\f560"; } +.bi-skip-start-circle-fill::before { content: "\f561"; } +.bi-skip-start-circle::before { content: "\f562"; } +.bi-skip-start-fill::before { content: 
"\f563"; } +.bi-skip-start::before { content: "\f564"; } +.bi-slack::before { content: "\f565"; } +.bi-slash-circle-fill::before { content: "\f566"; } +.bi-slash-circle::before { content: "\f567"; } +.bi-slash-square-fill::before { content: "\f568"; } +.bi-slash-square::before { content: "\f569"; } +.bi-slash::before { content: "\f56a"; } +.bi-sliders::before { content: "\f56b"; } +.bi-smartwatch::before { content: "\f56c"; } +.bi-snow::before { content: "\f56d"; } +.bi-snow2::before { content: "\f56e"; } +.bi-snow3::before { content: "\f56f"; } +.bi-sort-alpha-down-alt::before { content: "\f570"; } +.bi-sort-alpha-down::before { content: "\f571"; } +.bi-sort-alpha-up-alt::before { content: "\f572"; } +.bi-sort-alpha-up::before { content: "\f573"; } +.bi-sort-down-alt::before { content: "\f574"; } +.bi-sort-down::before { content: "\f575"; } +.bi-sort-numeric-down-alt::before { content: "\f576"; } +.bi-sort-numeric-down::before { content: "\f577"; } +.bi-sort-numeric-up-alt::before { content: "\f578"; } +.bi-sort-numeric-up::before { content: "\f579"; } +.bi-sort-up-alt::before { content: "\f57a"; } +.bi-sort-up::before { content: "\f57b"; } +.bi-soundwave::before { content: "\f57c"; } +.bi-speaker-fill::before { content: "\f57d"; } +.bi-speaker::before { content: "\f57e"; } +.bi-speedometer::before { content: "\f57f"; } +.bi-speedometer2::before { content: "\f580"; } +.bi-spellcheck::before { content: "\f581"; } +.bi-square-fill::before { content: "\f582"; } +.bi-square-half::before { content: "\f583"; } +.bi-square::before { content: "\f584"; } +.bi-stack::before { content: "\f585"; } +.bi-star-fill::before { content: "\f586"; } +.bi-star-half::before { content: "\f587"; } +.bi-star::before { content: "\f588"; } +.bi-stars::before { content: "\f589"; } +.bi-stickies-fill::before { content: "\f58a"; } +.bi-stickies::before { content: "\f58b"; } +.bi-sticky-fill::before { content: "\f58c"; } +.bi-sticky::before { content: "\f58d"; } +.bi-stop-btn-fill::before { 
content: "\f58e"; } +.bi-stop-btn::before { content: "\f58f"; } +.bi-stop-circle-fill::before { content: "\f590"; } +.bi-stop-circle::before { content: "\f591"; } +.bi-stop-fill::before { content: "\f592"; } +.bi-stop::before { content: "\f593"; } +.bi-stoplights-fill::before { content: "\f594"; } +.bi-stoplights::before { content: "\f595"; } +.bi-stopwatch-fill::before { content: "\f596"; } +.bi-stopwatch::before { content: "\f597"; } +.bi-subtract::before { content: "\f598"; } +.bi-suit-club-fill::before { content: "\f599"; } +.bi-suit-club::before { content: "\f59a"; } +.bi-suit-diamond-fill::before { content: "\f59b"; } +.bi-suit-diamond::before { content: "\f59c"; } +.bi-suit-heart-fill::before { content: "\f59d"; } +.bi-suit-heart::before { content: "\f59e"; } +.bi-suit-spade-fill::before { content: "\f59f"; } +.bi-suit-spade::before { content: "\f5a0"; } +.bi-sun-fill::before { content: "\f5a1"; } +.bi-sun::before { content: "\f5a2"; } +.bi-sunglasses::before { content: "\f5a3"; } +.bi-sunrise-fill::before { content: "\f5a4"; } +.bi-sunrise::before { content: "\f5a5"; } +.bi-sunset-fill::before { content: "\f5a6"; } +.bi-sunset::before { content: "\f5a7"; } +.bi-symmetry-horizontal::before { content: "\f5a8"; } +.bi-symmetry-vertical::before { content: "\f5a9"; } +.bi-table::before { content: "\f5aa"; } +.bi-tablet-fill::before { content: "\f5ab"; } +.bi-tablet-landscape-fill::before { content: "\f5ac"; } +.bi-tablet-landscape::before { content: "\f5ad"; } +.bi-tablet::before { content: "\f5ae"; } +.bi-tag-fill::before { content: "\f5af"; } +.bi-tag::before { content: "\f5b0"; } +.bi-tags-fill::before { content: "\f5b1"; } +.bi-tags::before { content: "\f5b2"; } +.bi-telegram::before { content: "\f5b3"; } +.bi-telephone-fill::before { content: "\f5b4"; } +.bi-telephone-forward-fill::before { content: "\f5b5"; } +.bi-telephone-forward::before { content: "\f5b6"; } +.bi-telephone-inbound-fill::before { content: "\f5b7"; } +.bi-telephone-inbound::before { 
content: "\f5b8"; } +.bi-telephone-minus-fill::before { content: "\f5b9"; } +.bi-telephone-minus::before { content: "\f5ba"; } +.bi-telephone-outbound-fill::before { content: "\f5bb"; } +.bi-telephone-outbound::before { content: "\f5bc"; } +.bi-telephone-plus-fill::before { content: "\f5bd"; } +.bi-telephone-plus::before { content: "\f5be"; } +.bi-telephone-x-fill::before { content: "\f5bf"; } +.bi-telephone-x::before { content: "\f5c0"; } +.bi-telephone::before { content: "\f5c1"; } +.bi-terminal-fill::before { content: "\f5c2"; } +.bi-terminal::before { content: "\f5c3"; } +.bi-text-center::before { content: "\f5c4"; } +.bi-text-indent-left::before { content: "\f5c5"; } +.bi-text-indent-right::before { content: "\f5c6"; } +.bi-text-left::before { content: "\f5c7"; } +.bi-text-paragraph::before { content: "\f5c8"; } +.bi-text-right::before { content: "\f5c9"; } +.bi-textarea-resize::before { content: "\f5ca"; } +.bi-textarea-t::before { content: "\f5cb"; } +.bi-textarea::before { content: "\f5cc"; } +.bi-thermometer-half::before { content: "\f5cd"; } +.bi-thermometer-high::before { content: "\f5ce"; } +.bi-thermometer-low::before { content: "\f5cf"; } +.bi-thermometer-snow::before { content: "\f5d0"; } +.bi-thermometer-sun::before { content: "\f5d1"; } +.bi-thermometer::before { content: "\f5d2"; } +.bi-three-dots-vertical::before { content: "\f5d3"; } +.bi-three-dots::before { content: "\f5d4"; } +.bi-toggle-off::before { content: "\f5d5"; } +.bi-toggle-on::before { content: "\f5d6"; } +.bi-toggle2-off::before { content: "\f5d7"; } +.bi-toggle2-on::before { content: "\f5d8"; } +.bi-toggles::before { content: "\f5d9"; } +.bi-toggles2::before { content: "\f5da"; } +.bi-tools::before { content: "\f5db"; } +.bi-tornado::before { content: "\f5dc"; } +.bi-trash-fill::before { content: "\f5dd"; } +.bi-trash::before { content: "\f5de"; } +.bi-trash2-fill::before { content: "\f5df"; } +.bi-trash2::before { content: "\f5e0"; } +.bi-tree-fill::before { content: "\f5e1"; } 
+.bi-tree::before { content: "\f5e2"; } +.bi-triangle-fill::before { content: "\f5e3"; } +.bi-triangle-half::before { content: "\f5e4"; } +.bi-triangle::before { content: "\f5e5"; } +.bi-trophy-fill::before { content: "\f5e6"; } +.bi-trophy::before { content: "\f5e7"; } +.bi-tropical-storm::before { content: "\f5e8"; } +.bi-truck-flatbed::before { content: "\f5e9"; } +.bi-truck::before { content: "\f5ea"; } +.bi-tsunami::before { content: "\f5eb"; } +.bi-tv-fill::before { content: "\f5ec"; } +.bi-tv::before { content: "\f5ed"; } +.bi-twitch::before { content: "\f5ee"; } +.bi-twitter::before { content: "\f5ef"; } +.bi-type-bold::before { content: "\f5f0"; } +.bi-type-h1::before { content: "\f5f1"; } +.bi-type-h2::before { content: "\f5f2"; } +.bi-type-h3::before { content: "\f5f3"; } +.bi-type-italic::before { content: "\f5f4"; } +.bi-type-strikethrough::before { content: "\f5f5"; } +.bi-type-underline::before { content: "\f5f6"; } +.bi-type::before { content: "\f5f7"; } +.bi-ui-checks-grid::before { content: "\f5f8"; } +.bi-ui-checks::before { content: "\f5f9"; } +.bi-ui-radios-grid::before { content: "\f5fa"; } +.bi-ui-radios::before { content: "\f5fb"; } +.bi-umbrella-fill::before { content: "\f5fc"; } +.bi-umbrella::before { content: "\f5fd"; } +.bi-union::before { content: "\f5fe"; } +.bi-unlock-fill::before { content: "\f5ff"; } +.bi-unlock::before { content: "\f600"; } +.bi-upc-scan::before { content: "\f601"; } +.bi-upc::before { content: "\f602"; } +.bi-upload::before { content: "\f603"; } +.bi-vector-pen::before { content: "\f604"; } +.bi-view-list::before { content: "\f605"; } +.bi-view-stacked::before { content: "\f606"; } +.bi-vinyl-fill::before { content: "\f607"; } +.bi-vinyl::before { content: "\f608"; } +.bi-voicemail::before { content: "\f609"; } +.bi-volume-down-fill::before { content: "\f60a"; } +.bi-volume-down::before { content: "\f60b"; } +.bi-volume-mute-fill::before { content: "\f60c"; } +.bi-volume-mute::before { content: "\f60d"; } 
+.bi-volume-off-fill::before { content: "\f60e"; } +.bi-volume-off::before { content: "\f60f"; } +.bi-volume-up-fill::before { content: "\f610"; } +.bi-volume-up::before { content: "\f611"; } +.bi-vr::before { content: "\f612"; } +.bi-wallet-fill::before { content: "\f613"; } +.bi-wallet::before { content: "\f614"; } +.bi-wallet2::before { content: "\f615"; } +.bi-watch::before { content: "\f616"; } +.bi-water::before { content: "\f617"; } +.bi-whatsapp::before { content: "\f618"; } +.bi-wifi-1::before { content: "\f619"; } +.bi-wifi-2::before { content: "\f61a"; } +.bi-wifi-off::before { content: "\f61b"; } +.bi-wifi::before { content: "\f61c"; } +.bi-wind::before { content: "\f61d"; } +.bi-window-dock::before { content: "\f61e"; } +.bi-window-sidebar::before { content: "\f61f"; } +.bi-window::before { content: "\f620"; } +.bi-wrench::before { content: "\f621"; } +.bi-x-circle-fill::before { content: "\f622"; } +.bi-x-circle::before { content: "\f623"; } +.bi-x-diamond-fill::before { content: "\f624"; } +.bi-x-diamond::before { content: "\f625"; } +.bi-x-octagon-fill::before { content: "\f626"; } +.bi-x-octagon::before { content: "\f627"; } +.bi-x-square-fill::before { content: "\f628"; } +.bi-x-square::before { content: "\f629"; } +.bi-x::before { content: "\f62a"; } +.bi-youtube::before { content: "\f62b"; } +.bi-zoom-in::before { content: "\f62c"; } +.bi-zoom-out::before { content: "\f62d"; } +.bi-bank::before { content: "\f62e"; } +.bi-bank2::before { content: "\f62f"; } +.bi-bell-slash-fill::before { content: "\f630"; } +.bi-bell-slash::before { content: "\f631"; } +.bi-cash-coin::before { content: "\f632"; } +.bi-check-lg::before { content: "\f633"; } +.bi-coin::before { content: "\f634"; } +.bi-currency-bitcoin::before { content: "\f635"; } +.bi-currency-dollar::before { content: "\f636"; } +.bi-currency-euro::before { content: "\f637"; } +.bi-currency-exchange::before { content: "\f638"; } +.bi-currency-pound::before { content: "\f639"; } 
+.bi-currency-yen::before { content: "\f63a"; } +.bi-dash-lg::before { content: "\f63b"; } +.bi-exclamation-lg::before { content: "\f63c"; } +.bi-file-earmark-pdf-fill::before { content: "\f63d"; } +.bi-file-earmark-pdf::before { content: "\f63e"; } +.bi-file-pdf-fill::before { content: "\f63f"; } +.bi-file-pdf::before { content: "\f640"; } +.bi-gender-ambiguous::before { content: "\f641"; } +.bi-gender-female::before { content: "\f642"; } +.bi-gender-male::before { content: "\f643"; } +.bi-gender-trans::before { content: "\f644"; } +.bi-headset-vr::before { content: "\f645"; } +.bi-info-lg::before { content: "\f646"; } +.bi-mastodon::before { content: "\f647"; } +.bi-messenger::before { content: "\f648"; } +.bi-piggy-bank-fill::before { content: "\f649"; } +.bi-piggy-bank::before { content: "\f64a"; } +.bi-pin-map-fill::before { content: "\f64b"; } +.bi-pin-map::before { content: "\f64c"; } +.bi-plus-lg::before { content: "\f64d"; } +.bi-question-lg::before { content: "\f64e"; } +.bi-recycle::before { content: "\f64f"; } +.bi-reddit::before { content: "\f650"; } +.bi-safe-fill::before { content: "\f651"; } +.bi-safe2-fill::before { content: "\f652"; } +.bi-safe2::before { content: "\f653"; } +.bi-sd-card-fill::before { content: "\f654"; } +.bi-sd-card::before { content: "\f655"; } +.bi-skype::before { content: "\f656"; } +.bi-slash-lg::before { content: "\f657"; } +.bi-translate::before { content: "\f658"; } +.bi-x-lg::before { content: "\f659"; } +.bi-safe::before { content: "\f65a"; } +.bi-apple::before { content: "\f65b"; } +.bi-microsoft::before { content: "\f65d"; } +.bi-windows::before { content: "\f65e"; } +.bi-behance::before { content: "\f65c"; } +.bi-dribbble::before { content: "\f65f"; } +.bi-line::before { content: "\f660"; } +.bi-medium::before { content: "\f661"; } +.bi-paypal::before { content: "\f662"; } +.bi-pinterest::before { content: "\f663"; } +.bi-signal::before { content: "\f664"; } +.bi-snapchat::before { content: "\f665"; } 
+.bi-spotify::before { content: "\f666"; } +.bi-stack-overflow::before { content: "\f667"; } +.bi-strava::before { content: "\f668"; } +.bi-wordpress::before { content: "\f669"; } +.bi-vimeo::before { content: "\f66a"; } +.bi-activity::before { content: "\f66b"; } +.bi-easel2-fill::before { content: "\f66c"; } +.bi-easel2::before { content: "\f66d"; } +.bi-easel3-fill::before { content: "\f66e"; } +.bi-easel3::before { content: "\f66f"; } +.bi-fan::before { content: "\f670"; } +.bi-fingerprint::before { content: "\f671"; } +.bi-graph-down-arrow::before { content: "\f672"; } +.bi-graph-up-arrow::before { content: "\f673"; } +.bi-hypnotize::before { content: "\f674"; } +.bi-magic::before { content: "\f675"; } +.bi-person-rolodex::before { content: "\f676"; } +.bi-person-video::before { content: "\f677"; } +.bi-person-video2::before { content: "\f678"; } +.bi-person-video3::before { content: "\f679"; } +.bi-person-workspace::before { content: "\f67a"; } +.bi-radioactive::before { content: "\f67b"; } +.bi-webcam-fill::before { content: "\f67c"; } +.bi-webcam::before { content: "\f67d"; } +.bi-yin-yang::before { content: "\f67e"; } +.bi-bandaid-fill::before { content: "\f680"; } +.bi-bandaid::before { content: "\f681"; } +.bi-bluetooth::before { content: "\f682"; } +.bi-body-text::before { content: "\f683"; } +.bi-boombox::before { content: "\f684"; } +.bi-boxes::before { content: "\f685"; } +.bi-dpad-fill::before { content: "\f686"; } +.bi-dpad::before { content: "\f687"; } +.bi-ear-fill::before { content: "\f688"; } +.bi-ear::before { content: "\f689"; } +.bi-envelope-check-fill::before { content: "\f68b"; } +.bi-envelope-check::before { content: "\f68c"; } +.bi-envelope-dash-fill::before { content: "\f68e"; } +.bi-envelope-dash::before { content: "\f68f"; } +.bi-envelope-exclamation-fill::before { content: "\f691"; } +.bi-envelope-exclamation::before { content: "\f692"; } +.bi-envelope-plus-fill::before { content: "\f693"; } +.bi-envelope-plus::before { content: 
"\f694"; } +.bi-envelope-slash-fill::before { content: "\f696"; } +.bi-envelope-slash::before { content: "\f697"; } +.bi-envelope-x-fill::before { content: "\f699"; } +.bi-envelope-x::before { content: "\f69a"; } +.bi-explicit-fill::before { content: "\f69b"; } +.bi-explicit::before { content: "\f69c"; } +.bi-git::before { content: "\f69d"; } +.bi-infinity::before { content: "\f69e"; } +.bi-list-columns-reverse::before { content: "\f69f"; } +.bi-list-columns::before { content: "\f6a0"; } +.bi-meta::before { content: "\f6a1"; } +.bi-nintendo-switch::before { content: "\f6a4"; } +.bi-pc-display-horizontal::before { content: "\f6a5"; } +.bi-pc-display::before { content: "\f6a6"; } +.bi-pc-horizontal::before { content: "\f6a7"; } +.bi-pc::before { content: "\f6a8"; } +.bi-playstation::before { content: "\f6a9"; } +.bi-plus-slash-minus::before { content: "\f6aa"; } +.bi-projector-fill::before { content: "\f6ab"; } +.bi-projector::before { content: "\f6ac"; } +.bi-qr-code-scan::before { content: "\f6ad"; } +.bi-qr-code::before { content: "\f6ae"; } +.bi-quora::before { content: "\f6af"; } +.bi-quote::before { content: "\f6b0"; } +.bi-robot::before { content: "\f6b1"; } +.bi-send-check-fill::before { content: "\f6b2"; } +.bi-send-check::before { content: "\f6b3"; } +.bi-send-dash-fill::before { content: "\f6b4"; } +.bi-send-dash::before { content: "\f6b5"; } +.bi-send-exclamation-fill::before { content: "\f6b7"; } +.bi-send-exclamation::before { content: "\f6b8"; } +.bi-send-fill::before { content: "\f6b9"; } +.bi-send-plus-fill::before { content: "\f6ba"; } +.bi-send-plus::before { content: "\f6bb"; } +.bi-send-slash-fill::before { content: "\f6bc"; } +.bi-send-slash::before { content: "\f6bd"; } +.bi-send-x-fill::before { content: "\f6be"; } +.bi-send-x::before { content: "\f6bf"; } +.bi-send::before { content: "\f6c0"; } +.bi-steam::before { content: "\f6c1"; } +.bi-terminal-dash::before { content: "\f6c3"; } +.bi-terminal-plus::before { content: "\f6c4"; } 
+.bi-terminal-split::before { content: "\f6c5"; } +.bi-ticket-detailed-fill::before { content: "\f6c6"; } +.bi-ticket-detailed::before { content: "\f6c7"; } +.bi-ticket-fill::before { content: "\f6c8"; } +.bi-ticket-perforated-fill::before { content: "\f6c9"; } +.bi-ticket-perforated::before { content: "\f6ca"; } +.bi-ticket::before { content: "\f6cb"; } +.bi-tiktok::before { content: "\f6cc"; } +.bi-window-dash::before { content: "\f6cd"; } +.bi-window-desktop::before { content: "\f6ce"; } +.bi-window-fullscreen::before { content: "\f6cf"; } +.bi-window-plus::before { content: "\f6d0"; } +.bi-window-split::before { content: "\f6d1"; } +.bi-window-stack::before { content: "\f6d2"; } +.bi-window-x::before { content: "\f6d3"; } +.bi-xbox::before { content: "\f6d4"; } +.bi-ethernet::before { content: "\f6d5"; } +.bi-hdmi-fill::before { content: "\f6d6"; } +.bi-hdmi::before { content: "\f6d7"; } +.bi-usb-c-fill::before { content: "\f6d8"; } +.bi-usb-c::before { content: "\f6d9"; } +.bi-usb-fill::before { content: "\f6da"; } +.bi-usb-plug-fill::before { content: "\f6db"; } +.bi-usb-plug::before { content: "\f6dc"; } +.bi-usb-symbol::before { content: "\f6dd"; } +.bi-usb::before { content: "\f6de"; } +.bi-boombox-fill::before { content: "\f6df"; } +.bi-displayport::before { content: "\f6e1"; } +.bi-gpu-card::before { content: "\f6e2"; } +.bi-memory::before { content: "\f6e3"; } +.bi-modem-fill::before { content: "\f6e4"; } +.bi-modem::before { content: "\f6e5"; } +.bi-motherboard-fill::before { content: "\f6e6"; } +.bi-motherboard::before { content: "\f6e7"; } +.bi-optical-audio-fill::before { content: "\f6e8"; } +.bi-optical-audio::before { content: "\f6e9"; } +.bi-pci-card::before { content: "\f6ea"; } +.bi-router-fill::before { content: "\f6eb"; } +.bi-router::before { content: "\f6ec"; } +.bi-thunderbolt-fill::before { content: "\f6ef"; } +.bi-thunderbolt::before { content: "\f6f0"; } +.bi-usb-drive-fill::before { content: "\f6f1"; } +.bi-usb-drive::before { content: 
"\f6f2"; } +.bi-usb-micro-fill::before { content: "\f6f3"; } +.bi-usb-micro::before { content: "\f6f4"; } +.bi-usb-mini-fill::before { content: "\f6f5"; } +.bi-usb-mini::before { content: "\f6f6"; } +.bi-cloud-haze2::before { content: "\f6f7"; } +.bi-device-hdd-fill::before { content: "\f6f8"; } +.bi-device-hdd::before { content: "\f6f9"; } +.bi-device-ssd-fill::before { content: "\f6fa"; } +.bi-device-ssd::before { content: "\f6fb"; } +.bi-displayport-fill::before { content: "\f6fc"; } +.bi-mortarboard-fill::before { content: "\f6fd"; } +.bi-mortarboard::before { content: "\f6fe"; } +.bi-terminal-x::before { content: "\f6ff"; } +.bi-arrow-through-heart-fill::before { content: "\f700"; } +.bi-arrow-through-heart::before { content: "\f701"; } +.bi-badge-sd-fill::before { content: "\f702"; } +.bi-badge-sd::before { content: "\f703"; } +.bi-bag-heart-fill::before { content: "\f704"; } +.bi-bag-heart::before { content: "\f705"; } +.bi-balloon-fill::before { content: "\f706"; } +.bi-balloon-heart-fill::before { content: "\f707"; } +.bi-balloon-heart::before { content: "\f708"; } +.bi-balloon::before { content: "\f709"; } +.bi-box2-fill::before { content: "\f70a"; } +.bi-box2-heart-fill::before { content: "\f70b"; } +.bi-box2-heart::before { content: "\f70c"; } +.bi-box2::before { content: "\f70d"; } +.bi-braces-asterisk::before { content: "\f70e"; } +.bi-calendar-heart-fill::before { content: "\f70f"; } +.bi-calendar-heart::before { content: "\f710"; } +.bi-calendar2-heart-fill::before { content: "\f711"; } +.bi-calendar2-heart::before { content: "\f712"; } +.bi-chat-heart-fill::before { content: "\f713"; } +.bi-chat-heart::before { content: "\f714"; } +.bi-chat-left-heart-fill::before { content: "\f715"; } +.bi-chat-left-heart::before { content: "\f716"; } +.bi-chat-right-heart-fill::before { content: "\f717"; } +.bi-chat-right-heart::before { content: "\f718"; } +.bi-chat-square-heart-fill::before { content: "\f719"; } +.bi-chat-square-heart::before { content: 
"\f71a"; } +.bi-clipboard-check-fill::before { content: "\f71b"; } +.bi-clipboard-data-fill::before { content: "\f71c"; } +.bi-clipboard-fill::before { content: "\f71d"; } +.bi-clipboard-heart-fill::before { content: "\f71e"; } +.bi-clipboard-heart::before { content: "\f71f"; } +.bi-clipboard-minus-fill::before { content: "\f720"; } +.bi-clipboard-plus-fill::before { content: "\f721"; } +.bi-clipboard-pulse::before { content: "\f722"; } +.bi-clipboard-x-fill::before { content: "\f723"; } +.bi-clipboard2-check-fill::before { content: "\f724"; } +.bi-clipboard2-check::before { content: "\f725"; } +.bi-clipboard2-data-fill::before { content: "\f726"; } +.bi-clipboard2-data::before { content: "\f727"; } +.bi-clipboard2-fill::before { content: "\f728"; } +.bi-clipboard2-heart-fill::before { content: "\f729"; } +.bi-clipboard2-heart::before { content: "\f72a"; } +.bi-clipboard2-minus-fill::before { content: "\f72b"; } +.bi-clipboard2-minus::before { content: "\f72c"; } +.bi-clipboard2-plus-fill::before { content: "\f72d"; } +.bi-clipboard2-plus::before { content: "\f72e"; } +.bi-clipboard2-pulse-fill::before { content: "\f72f"; } +.bi-clipboard2-pulse::before { content: "\f730"; } +.bi-clipboard2-x-fill::before { content: "\f731"; } +.bi-clipboard2-x::before { content: "\f732"; } +.bi-clipboard2::before { content: "\f733"; } +.bi-emoji-kiss-fill::before { content: "\f734"; } +.bi-emoji-kiss::before { content: "\f735"; } +.bi-envelope-heart-fill::before { content: "\f736"; } +.bi-envelope-heart::before { content: "\f737"; } +.bi-envelope-open-heart-fill::before { content: "\f738"; } +.bi-envelope-open-heart::before { content: "\f739"; } +.bi-envelope-paper-fill::before { content: "\f73a"; } +.bi-envelope-paper-heart-fill::before { content: "\f73b"; } +.bi-envelope-paper-heart::before { content: "\f73c"; } +.bi-envelope-paper::before { content: "\f73d"; } +.bi-filetype-aac::before { content: "\f73e"; } +.bi-filetype-ai::before { content: "\f73f"; } 
+.bi-filetype-bmp::before { content: "\f740"; } +.bi-filetype-cs::before { content: "\f741"; } +.bi-filetype-css::before { content: "\f742"; } +.bi-filetype-csv::before { content: "\f743"; } +.bi-filetype-doc::before { content: "\f744"; } +.bi-filetype-docx::before { content: "\f745"; } +.bi-filetype-exe::before { content: "\f746"; } +.bi-filetype-gif::before { content: "\f747"; } +.bi-filetype-heic::before { content: "\f748"; } +.bi-filetype-html::before { content: "\f749"; } +.bi-filetype-java::before { content: "\f74a"; } +.bi-filetype-jpg::before { content: "\f74b"; } +.bi-filetype-js::before { content: "\f74c"; } +.bi-filetype-jsx::before { content: "\f74d"; } +.bi-filetype-key::before { content: "\f74e"; } +.bi-filetype-m4p::before { content: "\f74f"; } +.bi-filetype-md::before { content: "\f750"; } +.bi-filetype-mdx::before { content: "\f751"; } +.bi-filetype-mov::before { content: "\f752"; } +.bi-filetype-mp3::before { content: "\f753"; } +.bi-filetype-mp4::before { content: "\f754"; } +.bi-filetype-otf::before { content: "\f755"; } +.bi-filetype-pdf::before { content: "\f756"; } +.bi-filetype-php::before { content: "\f757"; } +.bi-filetype-png::before { content: "\f758"; } +.bi-filetype-ppt::before { content: "\f75a"; } +.bi-filetype-psd::before { content: "\f75b"; } +.bi-filetype-py::before { content: "\f75c"; } +.bi-filetype-raw::before { content: "\f75d"; } +.bi-filetype-rb::before { content: "\f75e"; } +.bi-filetype-sass::before { content: "\f75f"; } +.bi-filetype-scss::before { content: "\f760"; } +.bi-filetype-sh::before { content: "\f761"; } +.bi-filetype-svg::before { content: "\f762"; } +.bi-filetype-tiff::before { content: "\f763"; } +.bi-filetype-tsx::before { content: "\f764"; } +.bi-filetype-ttf::before { content: "\f765"; } +.bi-filetype-txt::before { content: "\f766"; } +.bi-filetype-wav::before { content: "\f767"; } +.bi-filetype-woff::before { content: "\f768"; } +.bi-filetype-xls::before { content: "\f76a"; } +.bi-filetype-xml::before { 
content: "\f76b"; } +.bi-filetype-yml::before { content: "\f76c"; } +.bi-heart-arrow::before { content: "\f76d"; } +.bi-heart-pulse-fill::before { content: "\f76e"; } +.bi-heart-pulse::before { content: "\f76f"; } +.bi-heartbreak-fill::before { content: "\f770"; } +.bi-heartbreak::before { content: "\f771"; } +.bi-hearts::before { content: "\f772"; } +.bi-hospital-fill::before { content: "\f773"; } +.bi-hospital::before { content: "\f774"; } +.bi-house-heart-fill::before { content: "\f775"; } +.bi-house-heart::before { content: "\f776"; } +.bi-incognito::before { content: "\f777"; } +.bi-magnet-fill::before { content: "\f778"; } +.bi-magnet::before { content: "\f779"; } +.bi-person-heart::before { content: "\f77a"; } +.bi-person-hearts::before { content: "\f77b"; } +.bi-phone-flip::before { content: "\f77c"; } +.bi-plugin::before { content: "\f77d"; } +.bi-postage-fill::before { content: "\f77e"; } +.bi-postage-heart-fill::before { content: "\f77f"; } +.bi-postage-heart::before { content: "\f780"; } +.bi-postage::before { content: "\f781"; } +.bi-postcard-fill::before { content: "\f782"; } +.bi-postcard-heart-fill::before { content: "\f783"; } +.bi-postcard-heart::before { content: "\f784"; } +.bi-postcard::before { content: "\f785"; } +.bi-search-heart-fill::before { content: "\f786"; } +.bi-search-heart::before { content: "\f787"; } +.bi-sliders2-vertical::before { content: "\f788"; } +.bi-sliders2::before { content: "\f789"; } +.bi-trash3-fill::before { content: "\f78a"; } +.bi-trash3::before { content: "\f78b"; } +.bi-valentine::before { content: "\f78c"; } +.bi-valentine2::before { content: "\f78d"; } +.bi-wrench-adjustable-circle-fill::before { content: "\f78e"; } +.bi-wrench-adjustable-circle::before { content: "\f78f"; } +.bi-wrench-adjustable::before { content: "\f790"; } +.bi-filetype-json::before { content: "\f791"; } +.bi-filetype-pptx::before { content: "\f792"; } +.bi-filetype-xlsx::before { content: "\f793"; } +.bi-1-circle-fill::before { content: 
"\f796"; } +.bi-1-circle::before { content: "\f797"; } +.bi-1-square-fill::before { content: "\f798"; } +.bi-1-square::before { content: "\f799"; } +.bi-2-circle-fill::before { content: "\f79c"; } +.bi-2-circle::before { content: "\f79d"; } +.bi-2-square-fill::before { content: "\f79e"; } +.bi-2-square::before { content: "\f79f"; } +.bi-3-circle-fill::before { content: "\f7a2"; } +.bi-3-circle::before { content: "\f7a3"; } +.bi-3-square-fill::before { content: "\f7a4"; } +.bi-3-square::before { content: "\f7a5"; } +.bi-4-circle-fill::before { content: "\f7a8"; } +.bi-4-circle::before { content: "\f7a9"; } +.bi-4-square-fill::before { content: "\f7aa"; } +.bi-4-square::before { content: "\f7ab"; } +.bi-5-circle-fill::before { content: "\f7ae"; } +.bi-5-circle::before { content: "\f7af"; } +.bi-5-square-fill::before { content: "\f7b0"; } +.bi-5-square::before { content: "\f7b1"; } +.bi-6-circle-fill::before { content: "\f7b4"; } +.bi-6-circle::before { content: "\f7b5"; } +.bi-6-square-fill::before { content: "\f7b6"; } +.bi-6-square::before { content: "\f7b7"; } +.bi-7-circle-fill::before { content: "\f7ba"; } +.bi-7-circle::before { content: "\f7bb"; } +.bi-7-square-fill::before { content: "\f7bc"; } +.bi-7-square::before { content: "\f7bd"; } +.bi-8-circle-fill::before { content: "\f7c0"; } +.bi-8-circle::before { content: "\f7c1"; } +.bi-8-square-fill::before { content: "\f7c2"; } +.bi-8-square::before { content: "\f7c3"; } +.bi-9-circle-fill::before { content: "\f7c6"; } +.bi-9-circle::before { content: "\f7c7"; } +.bi-9-square-fill::before { content: "\f7c8"; } +.bi-9-square::before { content: "\f7c9"; } +.bi-airplane-engines-fill::before { content: "\f7ca"; } +.bi-airplane-engines::before { content: "\f7cb"; } +.bi-airplane-fill::before { content: "\f7cc"; } +.bi-airplane::before { content: "\f7cd"; } +.bi-alexa::before { content: "\f7ce"; } +.bi-alipay::before { content: "\f7cf"; } +.bi-android::before { content: "\f7d0"; } +.bi-android2::before { content: 
"\f7d1"; } +.bi-box-fill::before { content: "\f7d2"; } +.bi-box-seam-fill::before { content: "\f7d3"; } +.bi-browser-chrome::before { content: "\f7d4"; } +.bi-browser-edge::before { content: "\f7d5"; } +.bi-browser-firefox::before { content: "\f7d6"; } +.bi-browser-safari::before { content: "\f7d7"; } +.bi-c-circle-fill::before { content: "\f7da"; } +.bi-c-circle::before { content: "\f7db"; } +.bi-c-square-fill::before { content: "\f7dc"; } +.bi-c-square::before { content: "\f7dd"; } +.bi-capsule-pill::before { content: "\f7de"; } +.bi-capsule::before { content: "\f7df"; } +.bi-car-front-fill::before { content: "\f7e0"; } +.bi-car-front::before { content: "\f7e1"; } +.bi-cassette-fill::before { content: "\f7e2"; } +.bi-cassette::before { content: "\f7e3"; } +.bi-cc-circle-fill::before { content: "\f7e6"; } +.bi-cc-circle::before { content: "\f7e7"; } +.bi-cc-square-fill::before { content: "\f7e8"; } +.bi-cc-square::before { content: "\f7e9"; } +.bi-cup-hot-fill::before { content: "\f7ea"; } +.bi-cup-hot::before { content: "\f7eb"; } +.bi-currency-rupee::before { content: "\f7ec"; } +.bi-dropbox::before { content: "\f7ed"; } +.bi-escape::before { content: "\f7ee"; } +.bi-fast-forward-btn-fill::before { content: "\f7ef"; } +.bi-fast-forward-btn::before { content: "\f7f0"; } +.bi-fast-forward-circle-fill::before { content: "\f7f1"; } +.bi-fast-forward-circle::before { content: "\f7f2"; } +.bi-fast-forward-fill::before { content: "\f7f3"; } +.bi-fast-forward::before { content: "\f7f4"; } +.bi-filetype-sql::before { content: "\f7f5"; } +.bi-fire::before { content: "\f7f6"; } +.bi-google-play::before { content: "\f7f7"; } +.bi-h-circle-fill::before { content: "\f7fa"; } +.bi-h-circle::before { content: "\f7fb"; } +.bi-h-square-fill::before { content: "\f7fc"; } +.bi-h-square::before { content: "\f7fd"; } +.bi-indent::before { content: "\f7fe"; } +.bi-lungs-fill::before { content: "\f7ff"; } +.bi-lungs::before { content: "\f800"; } +.bi-microsoft-teams::before { content: 
"\f801"; } +.bi-p-circle-fill::before { content: "\f804"; } +.bi-p-circle::before { content: "\f805"; } +.bi-p-square-fill::before { content: "\f806"; } +.bi-p-square::before { content: "\f807"; } +.bi-pass-fill::before { content: "\f808"; } +.bi-pass::before { content: "\f809"; } +.bi-prescription::before { content: "\f80a"; } +.bi-prescription2::before { content: "\f80b"; } +.bi-r-circle-fill::before { content: "\f80e"; } +.bi-r-circle::before { content: "\f80f"; } +.bi-r-square-fill::before { content: "\f810"; } +.bi-r-square::before { content: "\f811"; } +.bi-repeat-1::before { content: "\f812"; } +.bi-repeat::before { content: "\f813"; } +.bi-rewind-btn-fill::before { content: "\f814"; } +.bi-rewind-btn::before { content: "\f815"; } +.bi-rewind-circle-fill::before { content: "\f816"; } +.bi-rewind-circle::before { content: "\f817"; } +.bi-rewind-fill::before { content: "\f818"; } +.bi-rewind::before { content: "\f819"; } +.bi-train-freight-front-fill::before { content: "\f81a"; } +.bi-train-freight-front::before { content: "\f81b"; } +.bi-train-front-fill::before { content: "\f81c"; } +.bi-train-front::before { content: "\f81d"; } +.bi-train-lightrail-front-fill::before { content: "\f81e"; } +.bi-train-lightrail-front::before { content: "\f81f"; } +.bi-truck-front-fill::before { content: "\f820"; } +.bi-truck-front::before { content: "\f821"; } +.bi-ubuntu::before { content: "\f822"; } +.bi-unindent::before { content: "\f823"; } +.bi-unity::before { content: "\f824"; } +.bi-universal-access-circle::before { content: "\f825"; } +.bi-universal-access::before { content: "\f826"; } +.bi-virus::before { content: "\f827"; } +.bi-virus2::before { content: "\f828"; } +.bi-wechat::before { content: "\f829"; } +.bi-yelp::before { content: "\f82a"; } +.bi-sign-stop-fill::before { content: "\f82b"; } +.bi-sign-stop-lights-fill::before { content: "\f82c"; } +.bi-sign-stop-lights::before { content: "\f82d"; } +.bi-sign-stop::before { content: "\f82e"; } 
+.bi-sign-turn-left-fill::before { content: "\f82f"; } +.bi-sign-turn-left::before { content: "\f830"; } +.bi-sign-turn-right-fill::before { content: "\f831"; } +.bi-sign-turn-right::before { content: "\f832"; } +.bi-sign-turn-slight-left-fill::before { content: "\f833"; } +.bi-sign-turn-slight-left::before { content: "\f834"; } +.bi-sign-turn-slight-right-fill::before { content: "\f835"; } +.bi-sign-turn-slight-right::before { content: "\f836"; } +.bi-sign-yield-fill::before { content: "\f837"; } +.bi-sign-yield::before { content: "\f838"; } +.bi-ev-station-fill::before { content: "\f839"; } +.bi-ev-station::before { content: "\f83a"; } +.bi-fuel-pump-diesel-fill::before { content: "\f83b"; } +.bi-fuel-pump-diesel::before { content: "\f83c"; } +.bi-fuel-pump-fill::before { content: "\f83d"; } +.bi-fuel-pump::before { content: "\f83e"; } +.bi-0-circle-fill::before { content: "\f83f"; } +.bi-0-circle::before { content: "\f840"; } +.bi-0-square-fill::before { content: "\f841"; } +.bi-0-square::before { content: "\f842"; } +.bi-rocket-fill::before { content: "\f843"; } +.bi-rocket-takeoff-fill::before { content: "\f844"; } +.bi-rocket-takeoff::before { content: "\f845"; } +.bi-rocket::before { content: "\f846"; } +.bi-stripe::before { content: "\f847"; } +.bi-subscript::before { content: "\f848"; } +.bi-superscript::before { content: "\f849"; } +.bi-trello::before { content: "\f84a"; } +.bi-envelope-at-fill::before { content: "\f84b"; } +.bi-envelope-at::before { content: "\f84c"; } +.bi-regex::before { content: "\f84d"; } +.bi-text-wrap::before { content: "\f84e"; } +.bi-sign-dead-end-fill::before { content: "\f84f"; } +.bi-sign-dead-end::before { content: "\f850"; } +.bi-sign-do-not-enter-fill::before { content: "\f851"; } +.bi-sign-do-not-enter::before { content: "\f852"; } +.bi-sign-intersection-fill::before { content: "\f853"; } +.bi-sign-intersection-side-fill::before { content: "\f854"; } +.bi-sign-intersection-side::before { content: "\f855"; } 
+.bi-sign-intersection-t-fill::before { content: "\f856"; } +.bi-sign-intersection-t::before { content: "\f857"; } +.bi-sign-intersection-y-fill::before { content: "\f858"; } +.bi-sign-intersection-y::before { content: "\f859"; } +.bi-sign-intersection::before { content: "\f85a"; } +.bi-sign-merge-left-fill::before { content: "\f85b"; } +.bi-sign-merge-left::before { content: "\f85c"; } +.bi-sign-merge-right-fill::before { content: "\f85d"; } +.bi-sign-merge-right::before { content: "\f85e"; } +.bi-sign-no-left-turn-fill::before { content: "\f85f"; } +.bi-sign-no-left-turn::before { content: "\f860"; } +.bi-sign-no-parking-fill::before { content: "\f861"; } +.bi-sign-no-parking::before { content: "\f862"; } +.bi-sign-no-right-turn-fill::before { content: "\f863"; } +.bi-sign-no-right-turn::before { content: "\f864"; } +.bi-sign-railroad-fill::before { content: "\f865"; } +.bi-sign-railroad::before { content: "\f866"; } +.bi-building-add::before { content: "\f867"; } +.bi-building-check::before { content: "\f868"; } +.bi-building-dash::before { content: "\f869"; } +.bi-building-down::before { content: "\f86a"; } +.bi-building-exclamation::before { content: "\f86b"; } +.bi-building-fill-add::before { content: "\f86c"; } +.bi-building-fill-check::before { content: "\f86d"; } +.bi-building-fill-dash::before { content: "\f86e"; } +.bi-building-fill-down::before { content: "\f86f"; } +.bi-building-fill-exclamation::before { content: "\f870"; } +.bi-building-fill-gear::before { content: "\f871"; } +.bi-building-fill-lock::before { content: "\f872"; } +.bi-building-fill-slash::before { content: "\f873"; } +.bi-building-fill-up::before { content: "\f874"; } +.bi-building-fill-x::before { content: "\f875"; } +.bi-building-fill::before { content: "\f876"; } +.bi-building-gear::before { content: "\f877"; } +.bi-building-lock::before { content: "\f878"; } +.bi-building-slash::before { content: "\f879"; } +.bi-building-up::before { content: "\f87a"; } +.bi-building-x::before { 
content: "\f87b"; } +.bi-buildings-fill::before { content: "\f87c"; } +.bi-buildings::before { content: "\f87d"; } +.bi-bus-front-fill::before { content: "\f87e"; } +.bi-bus-front::before { content: "\f87f"; } +.bi-ev-front-fill::before { content: "\f880"; } +.bi-ev-front::before { content: "\f881"; } +.bi-globe-americas::before { content: "\f882"; } +.bi-globe-asia-australia::before { content: "\f883"; } +.bi-globe-central-south-asia::before { content: "\f884"; } +.bi-globe-europe-africa::before { content: "\f885"; } +.bi-house-add-fill::before { content: "\f886"; } +.bi-house-add::before { content: "\f887"; } +.bi-house-check-fill::before { content: "\f888"; } +.bi-house-check::before { content: "\f889"; } +.bi-house-dash-fill::before { content: "\f88a"; } +.bi-house-dash::before { content: "\f88b"; } +.bi-house-down-fill::before { content: "\f88c"; } +.bi-house-down::before { content: "\f88d"; } +.bi-house-exclamation-fill::before { content: "\f88e"; } +.bi-house-exclamation::before { content: "\f88f"; } +.bi-house-gear-fill::before { content: "\f890"; } +.bi-house-gear::before { content: "\f891"; } +.bi-house-lock-fill::before { content: "\f892"; } +.bi-house-lock::before { content: "\f893"; } +.bi-house-slash-fill::before { content: "\f894"; } +.bi-house-slash::before { content: "\f895"; } +.bi-house-up-fill::before { content: "\f896"; } +.bi-house-up::before { content: "\f897"; } +.bi-house-x-fill::before { content: "\f898"; } +.bi-house-x::before { content: "\f899"; } +.bi-person-add::before { content: "\f89a"; } +.bi-person-down::before { content: "\f89b"; } +.bi-person-exclamation::before { content: "\f89c"; } +.bi-person-fill-add::before { content: "\f89d"; } +.bi-person-fill-check::before { content: "\f89e"; } +.bi-person-fill-dash::before { content: "\f89f"; } +.bi-person-fill-down::before { content: "\f8a0"; } +.bi-person-fill-exclamation::before { content: "\f8a1"; } +.bi-person-fill-gear::before { content: "\f8a2"; } +.bi-person-fill-lock::before { 
content: "\f8a3"; } +.bi-person-fill-slash::before { content: "\f8a4"; } +.bi-person-fill-up::before { content: "\f8a5"; } +.bi-person-fill-x::before { content: "\f8a6"; } +.bi-person-gear::before { content: "\f8a7"; } +.bi-person-lock::before { content: "\f8a8"; } +.bi-person-slash::before { content: "\f8a9"; } +.bi-person-up::before { content: "\f8aa"; } +.bi-scooter::before { content: "\f8ab"; } +.bi-taxi-front-fill::before { content: "\f8ac"; } +.bi-taxi-front::before { content: "\f8ad"; } +.bi-amd::before { content: "\f8ae"; } +.bi-database-add::before { content: "\f8af"; } +.bi-database-check::before { content: "\f8b0"; } +.bi-database-dash::before { content: "\f8b1"; } +.bi-database-down::before { content: "\f8b2"; } +.bi-database-exclamation::before { content: "\f8b3"; } +.bi-database-fill-add::before { content: "\f8b4"; } +.bi-database-fill-check::before { content: "\f8b5"; } +.bi-database-fill-dash::before { content: "\f8b6"; } +.bi-database-fill-down::before { content: "\f8b7"; } +.bi-database-fill-exclamation::before { content: "\f8b8"; } +.bi-database-fill-gear::before { content: "\f8b9"; } +.bi-database-fill-lock::before { content: "\f8ba"; } +.bi-database-fill-slash::before { content: "\f8bb"; } +.bi-database-fill-up::before { content: "\f8bc"; } +.bi-database-fill-x::before { content: "\f8bd"; } +.bi-database-fill::before { content: "\f8be"; } +.bi-database-gear::before { content: "\f8bf"; } +.bi-database-lock::before { content: "\f8c0"; } +.bi-database-slash::before { content: "\f8c1"; } +.bi-database-up::before { content: "\f8c2"; } +.bi-database-x::before { content: "\f8c3"; } +.bi-database::before { content: "\f8c4"; } +.bi-houses-fill::before { content: "\f8c5"; } +.bi-houses::before { content: "\f8c6"; } +.bi-nvidia::before { content: "\f8c7"; } +.bi-person-vcard-fill::before { content: "\f8c8"; } +.bi-person-vcard::before { content: "\f8c9"; } +.bi-sina-weibo::before { content: "\f8ca"; } +.bi-tencent-qq::before { content: "\f8cb"; } 
+.bi-wikipedia::before { content: "\f8cc"; } +.bi-alphabet-uppercase::before { content: "\f2a5"; } +.bi-alphabet::before { content: "\f68a"; } +.bi-amazon::before { content: "\f68d"; } +.bi-arrows-collapse-vertical::before { content: "\f690"; } +.bi-arrows-expand-vertical::before { content: "\f695"; } +.bi-arrows-vertical::before { content: "\f698"; } +.bi-arrows::before { content: "\f6a2"; } +.bi-ban-fill::before { content: "\f6a3"; } +.bi-ban::before { content: "\f6b6"; } +.bi-bing::before { content: "\f6c2"; } +.bi-cake::before { content: "\f6e0"; } +.bi-cake2::before { content: "\f6ed"; } +.bi-cookie::before { content: "\f6ee"; } +.bi-copy::before { content: "\f759"; } +.bi-crosshair::before { content: "\f769"; } +.bi-crosshair2::before { content: "\f794"; } +.bi-emoji-astonished-fill::before { content: "\f795"; } +.bi-emoji-astonished::before { content: "\f79a"; } +.bi-emoji-grimace-fill::before { content: "\f79b"; } +.bi-emoji-grimace::before { content: "\f7a0"; } +.bi-emoji-grin-fill::before { content: "\f7a1"; } +.bi-emoji-grin::before { content: "\f7a6"; } +.bi-emoji-surprise-fill::before { content: "\f7a7"; } +.bi-emoji-surprise::before { content: "\f7ac"; } +.bi-emoji-tear-fill::before { content: "\f7ad"; } +.bi-emoji-tear::before { content: "\f7b2"; } +.bi-envelope-arrow-down-fill::before { content: "\f7b3"; } +.bi-envelope-arrow-down::before { content: "\f7b8"; } +.bi-envelope-arrow-up-fill::before { content: "\f7b9"; } +.bi-envelope-arrow-up::before { content: "\f7be"; } +.bi-feather::before { content: "\f7bf"; } +.bi-feather2::before { content: "\f7c4"; } +.bi-floppy-fill::before { content: "\f7c5"; } +.bi-floppy::before { content: "\f7d8"; } +.bi-floppy2-fill::before { content: "\f7d9"; } +.bi-floppy2::before { content: "\f7e4"; } +.bi-gitlab::before { content: "\f7e5"; } +.bi-highlighter::before { content: "\f7f8"; } +.bi-marker-tip::before { content: "\f802"; } +.bi-nvme-fill::before { content: "\f803"; } +.bi-nvme::before { content: "\f80c"; } 
+.bi-opencollective::before { content: "\f80d"; } +.bi-pci-card-network::before { content: "\f8cd"; } +.bi-pci-card-sound::before { content: "\f8ce"; } +.bi-radar::before { content: "\f8cf"; } +.bi-send-arrow-down-fill::before { content: "\f8d0"; } +.bi-send-arrow-down::before { content: "\f8d1"; } +.bi-send-arrow-up-fill::before { content: "\f8d2"; } +.bi-send-arrow-up::before { content: "\f8d3"; } +.bi-sim-slash-fill::before { content: "\f8d4"; } +.bi-sim-slash::before { content: "\f8d5"; } +.bi-sourceforge::before { content: "\f8d6"; } +.bi-substack::before { content: "\f8d7"; } +.bi-threads-fill::before { content: "\f8d8"; } +.bi-threads::before { content: "\f8d9"; } +.bi-transparency::before { content: "\f8da"; } +.bi-twitter-x::before { content: "\f8db"; } +.bi-type-h4::before { content: "\f8dc"; } +.bi-type-h5::before { content: "\f8dd"; } +.bi-type-h6::before { content: "\f8de"; } +.bi-backpack-fill::before { content: "\f8df"; } +.bi-backpack::before { content: "\f8e0"; } +.bi-backpack2-fill::before { content: "\f8e1"; } +.bi-backpack2::before { content: "\f8e2"; } +.bi-backpack3-fill::before { content: "\f8e3"; } +.bi-backpack3::before { content: "\f8e4"; } +.bi-backpack4-fill::before { content: "\f8e5"; } +.bi-backpack4::before { content: "\f8e6"; } +.bi-brilliance::before { content: "\f8e7"; } +.bi-cake-fill::before { content: "\f8e8"; } +.bi-cake2-fill::before { content: "\f8e9"; } +.bi-duffle-fill::before { content: "\f8ea"; } +.bi-duffle::before { content: "\f8eb"; } +.bi-exposure::before { content: "\f8ec"; } +.bi-gender-neuter::before { content: "\f8ed"; } +.bi-highlights::before { content: "\f8ee"; } +.bi-luggage-fill::before { content: "\f8ef"; } +.bi-luggage::before { content: "\f8f0"; } +.bi-mailbox-flag::before { content: "\f8f1"; } +.bi-mailbox2-flag::before { content: "\f8f2"; } +.bi-noise-reduction::before { content: "\f8f3"; } +.bi-passport-fill::before { content: "\f8f4"; } +.bi-passport::before { content: "\f8f5"; } 
+.bi-person-arms-up::before { content: "\f8f6"; } +.bi-person-raised-hand::before { content: "\f8f7"; } +.bi-person-standing-dress::before { content: "\f8f8"; } +.bi-person-standing::before { content: "\f8f9"; } +.bi-person-walking::before { content: "\f8fa"; } +.bi-person-wheelchair::before { content: "\f8fb"; } +.bi-shadows::before { content: "\f8fc"; } +.bi-suitcase-fill::before { content: "\f8fd"; } +.bi-suitcase-lg-fill::before { content: "\f8fe"; } +.bi-suitcase-lg::before { content: "\f8ff"; } +.bi-suitcase::before { content: "\f900"; } +.bi-suitcase2-fill::before { content: "\f901"; } +.bi-suitcase2::before { content: "\f902"; } +.bi-vignette::before { content: "\f903"; } diff --git a/site_libs/bootstrap/bootstrap-icons.woff b/site_libs/bootstrap/bootstrap-icons.woff new file mode 100644 index 0000000..dbeeb05 Binary files /dev/null and b/site_libs/bootstrap/bootstrap-icons.woff differ diff --git a/site_libs/bootstrap/bootstrap.min.css b/site_libs/bootstrap/bootstrap.min.css new file mode 100644 index 0000000..32aeabd --- /dev/null +++ b/site_libs/bootstrap/bootstrap.min.css @@ -0,0 +1,12 @@ +@import"https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@300;400;700&display=swap";.navbar-title{font-weight:bold;color:#e0d6e7}.navbar-logo{min-width:60px}a{font-weight:bold}/*! 
+ * Bootstrap v5.3.1 (https://getbootstrap.com/) + * Copyright 2011-2023 The Bootstrap Authors + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */:root,[data-bs-theme=light]{--bs-blue: #2780e3;--bs-indigo: #6610f2;--bs-purple: #613d7c;--bs-pink: #e83e8c;--bs-red: #ff0039;--bs-orange: #f0ad4e;--bs-yellow: #ff7518;--bs-green: #3fb618;--bs-teal: #20c997;--bs-cyan: #9954bb;--bs-black: #000;--bs-white: #fff;--bs-gray: #6c757d;--bs-gray-dark: #343a40;--bs-gray-100: #f8f9fa;--bs-gray-200: #e9ecef;--bs-gray-300: #dee2e6;--bs-gray-400: #ced4da;--bs-gray-500: #adb5bd;--bs-gray-600: #6c757d;--bs-gray-700: #495057;--bs-gray-800: #343a40;--bs-gray-900: #212529;--bs-default: #343a40;--bs-primary: #5c2983;--bs-secondary: #343a40;--bs-success: #3fb618;--bs-info: #9954bb;--bs-warning: #ff7518;--bs-danger: #ff0039;--bs-light: #f8f9fa;--bs-dark: #343a40;--bs-default-rgb: 52, 58, 64;--bs-primary-rgb: 92, 41, 131;--bs-secondary-rgb: 52, 58, 64;--bs-success-rgb: 63, 182, 24;--bs-info-rgb: 153, 84, 187;--bs-warning-rgb: 255, 117, 24;--bs-danger-rgb: 255, 0, 57;--bs-light-rgb: 248, 249, 250;--bs-dark-rgb: 52, 58, 64;--bs-primary-text-emphasis: #251034;--bs-secondary-text-emphasis: #15171a;--bs-success-text-emphasis: #19490a;--bs-info-text-emphasis: #3d224b;--bs-warning-text-emphasis: #662f0a;--bs-danger-text-emphasis: #660017;--bs-light-text-emphasis: #495057;--bs-dark-text-emphasis: #495057;--bs-primary-bg-subtle: #ded4e6;--bs-secondary-bg-subtle: #d6d8d9;--bs-success-bg-subtle: #d9f0d1;--bs-info-bg-subtle: #ebddf1;--bs-warning-bg-subtle: #ffe3d1;--bs-danger-bg-subtle: #ffccd7;--bs-light-bg-subtle: #fcfcfd;--bs-dark-bg-subtle: #ced4da;--bs-primary-border-subtle: #bea9cd;--bs-secondary-border-subtle: #aeb0b3;--bs-success-border-subtle: #b2e2a3;--bs-info-border-subtle: #d6bbe4;--bs-warning-border-subtle: #ffc8a3;--bs-danger-border-subtle: #ff99b0;--bs-light-border-subtle: #e9ecef;--bs-dark-border-subtle: #adb5bd;--bs-white-rgb: 255, 255, 255;--bs-black-rgb: 0, 
0, 0;--bs-font-sans-serif: "Source Sans Pro", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";--bs-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;--bs-gradient: linear-gradient(180deg, rgba(255, 255, 255, 0.15), rgba(255, 255, 255, 0));--bs-root-font-size: 17px;--bs-body-font-family: "Source Sans Pro", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";--bs-body-font-size:1rem;--bs-body-font-weight: 400;--bs-body-line-height: 1.5;--bs-body-color: #343a40;--bs-body-color-rgb: 52, 58, 64;--bs-body-bg: #fff;--bs-body-bg-rgb: 255, 255, 255;--bs-emphasis-color: #000;--bs-emphasis-color-rgb: 0, 0, 0;--bs-secondary-color: rgba(52, 58, 64, 0.75);--bs-secondary-color-rgb: 52, 58, 64;--bs-secondary-bg: #e9ecef;--bs-secondary-bg-rgb: 233, 236, 239;--bs-tertiary-color: rgba(52, 58, 64, 0.5);--bs-tertiary-color-rgb: 52, 58, 64;--bs-tertiary-bg: #f8f9fa;--bs-tertiary-bg-rgb: 248, 249, 250;--bs-heading-color: inherit;--bs-link-color: #40ba2f;--bs-link-color-rgb: 64, 186, 47;--bs-link-decoration: underline;--bs-link-hover-color: #339526;--bs-link-hover-color-rgb: 51, 149, 38;--bs-code-color: #7d12ba;--bs-highlight-bg: #ffe3d1;--bs-border-width: 1px;--bs-border-style: solid;--bs-border-color: #dee2e6;--bs-border-color-translucent: rgba(0, 0, 0, 0.175);--bs-border-radius: 0.25rem;--bs-border-radius-sm: 0.2em;--bs-border-radius-lg: 0.5rem;--bs-border-radius-xl: 1rem;--bs-border-radius-xxl: 2rem;--bs-border-radius-2xl: var(--bs-border-radius-xxl);--bs-border-radius-pill: 50rem;--bs-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);--bs-box-shadow-sm: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075);--bs-box-shadow-lg: 0 1rem 3rem rgba(0, 0, 0, 0.175);--bs-box-shadow-inset: inset 0 1px 2px rgba(0, 0, 0, 0.075);--bs-focus-ring-width: 
0.25rem;--bs-focus-ring-opacity: 0.25;--bs-focus-ring-color: rgba(92, 41, 131, 0.25);--bs-form-valid-color: #3fb618;--bs-form-valid-border-color: #3fb618;--bs-form-invalid-color: #ff0039;--bs-form-invalid-border-color: #ff0039}[data-bs-theme=dark]{color-scheme:dark;--bs-body-color: #dee2e6;--bs-body-color-rgb: 222, 226, 230;--bs-body-bg: #212529;--bs-body-bg-rgb: 33, 37, 41;--bs-emphasis-color: #fff;--bs-emphasis-color-rgb: 255, 255, 255;--bs-secondary-color: rgba(222, 226, 230, 0.75);--bs-secondary-color-rgb: 222, 226, 230;--bs-secondary-bg: #343a40;--bs-secondary-bg-rgb: 52, 58, 64;--bs-tertiary-color: rgba(222, 226, 230, 0.5);--bs-tertiary-color-rgb: 222, 226, 230;--bs-tertiary-bg: #2b3035;--bs-tertiary-bg-rgb: 43, 48, 53;--bs-primary-text-emphasis: #9d7fb5;--bs-secondary-text-emphasis: #85898c;--bs-success-text-emphasis: #8cd374;--bs-info-text-emphasis: #c298d6;--bs-warning-text-emphasis: #ffac74;--bs-danger-text-emphasis: #ff6688;--bs-light-text-emphasis: #f8f9fa;--bs-dark-text-emphasis: #dee2e6;--bs-primary-bg-subtle: #12081a;--bs-secondary-bg-subtle: #0a0c0d;--bs-success-bg-subtle: #0d2405;--bs-info-bg-subtle: #1f1125;--bs-warning-bg-subtle: #331705;--bs-danger-bg-subtle: #33000b;--bs-light-bg-subtle: #343a40;--bs-dark-bg-subtle: #1a1d20;--bs-primary-border-subtle: #37194f;--bs-secondary-border-subtle: #1f2326;--bs-success-border-subtle: #266d0e;--bs-info-border-subtle: #5c3270;--bs-warning-border-subtle: #99460e;--bs-danger-border-subtle: #990022;--bs-light-border-subtle: #495057;--bs-dark-border-subtle: #343a40;--bs-heading-color: inherit;--bs-link-color: #9d7fb5;--bs-link-hover-color: #b199c4;--bs-link-color-rgb: 157, 127, 181;--bs-link-hover-color-rgb: 177, 153, 196;--bs-code-color: white;--bs-border-color: #495057;--bs-border-color-translucent: rgba(255, 255, 255, 0.15);--bs-form-valid-color: #8cd374;--bs-form-valid-border-color: #8cd374;--bs-form-invalid-color: #ff6688;--bs-form-invalid-border-color: 
#ff6688}*,*::before,*::after{box-sizing:border-box}:root{font-size:var(--bs-root-font-size)}body{margin:0;font-family:var(--bs-body-font-family);font-size:var(--bs-body-font-size);font-weight:var(--bs-body-font-weight);line-height:var(--bs-body-line-height);color:var(--bs-body-color);text-align:var(--bs-body-text-align);background-color:var(--bs-body-bg);-webkit-text-size-adjust:100%;-webkit-tap-highlight-color:rgba(0,0,0,0)}hr{margin:1rem 0;color:inherit;border:0;border-top:1px solid;opacity:.25}h6,.h6,h5,.h5,h4,.h4,h3,.h3,h2,.h2,h1,.h1{margin-top:0;margin-bottom:.5rem;font-weight:400;line-height:1.2;color:var(--bs-heading-color)}h1,.h1{font-size:calc(1.325rem + 0.9vw)}@media(min-width: 1200px){h1,.h1{font-size:2rem}}h2,.h2{font-size:calc(1.29rem + 0.48vw)}@media(min-width: 1200px){h2,.h2{font-size:1.65rem}}h3,.h3{font-size:calc(1.27rem + 0.24vw)}@media(min-width: 1200px){h3,.h3{font-size:1.45rem}}h4,.h4{font-size:1.25rem}h5,.h5{font-size:1.1rem}h6,.h6{font-size:1rem}p{margin-top:0;margin-bottom:1rem}abbr[title]{text-decoration:underline dotted;-webkit-text-decoration:underline dotted;-moz-text-decoration:underline dotted;-ms-text-decoration:underline dotted;-o-text-decoration:underline dotted;cursor:help;text-decoration-skip-ink:none}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul{padding-left:2rem}ol,ul,dl{margin-top:0;margin-bottom:1rem}ol ol,ul ul,ol ul,ul ol{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem;padding:.625rem 1.25rem;border-left:.25rem solid #e9ecef}blockquote p:last-child,blockquote ul:last-child,blockquote ol:last-child{margin-bottom:0}b,strong{font-weight:bolder}small,.small{font-size:0.875em}mark,.mark{padding:.1875em;background-color:var(--bs-highlight-bg)}sub,sup{position:relative;font-size:0.75em;line-height:0;vertical-align:baseline}sub{bottom:-0.25em}sup{top:-0.5em}a{color:rgba(var(--bs-link-color-rgb), var(--bs-link-opacity, 
1));text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}a:hover{--bs-link-color-rgb: var(--bs-link-hover-color-rgb)}a:not([href]):not([class]),a:not([href]):not([class]):hover{color:inherit;text-decoration:none}pre,code,kbd,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;font-size:1em}pre{display:block;margin-top:0;margin-bottom:1rem;overflow:auto;font-size:0.875em;color:#000;background-color:#f8f9fa;padding:.5rem;border:1px solid var(--bs-border-color, #dee2e6)}pre code{background-color:rgba(0,0,0,0);font-size:inherit;color:inherit;word-break:normal}code{font-size:0.875em;color:var(--bs-code-color);background-color:#f8f9fa;padding:.125rem .25rem;word-wrap:break-word}a>code{color:inherit}kbd{padding:.4rem .4rem;font-size:0.875em;color:#fff;background-color:#343a40}kbd kbd{padding:0;font-size:1em}figure{margin:0 0 1rem}img,svg{vertical-align:middle}table{caption-side:bottom;border-collapse:collapse}caption{padding-top:.5rem;padding-bottom:.5rem;color:rgba(52,58,64,.75);text-align:left}th{text-align:inherit;text-align:-webkit-match-parent}thead,tbody,tfoot,tr,td,th{border-color:inherit;border-style:solid;border-width:0}label{display:inline-block}button{border-radius:0}button:focus:not(:focus-visible){outline:0}input,button,select,optgroup,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,select{text-transform:none}[role=button]{cursor:pointer}select{word-wrap:normal}select:disabled{opacity:1}[list]:not([type=date]):not([type=datetime-local]):not([type=month]):not([type=week]):not([type=time])::-webkit-calendar-picker-indicator{display:none 
!important}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button:not(:disabled),[type=button]:not(:disabled),[type=reset]:not(:disabled),[type=submit]:not(:disabled){cursor:pointer}::-moz-focus-inner{padding:0;border-style:none}textarea{resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{float:left;width:100%;padding:0;margin-bottom:.5rem;font-size:calc(1.275rem + 0.3vw);line-height:inherit}@media(min-width: 1200px){legend{font-size:1.5rem}}legend+*{clear:left}::-webkit-datetime-edit-fields-wrapper,::-webkit-datetime-edit-text,::-webkit-datetime-edit-minute,::-webkit-datetime-edit-hour-field,::-webkit-datetime-edit-day-field,::-webkit-datetime-edit-month-field,::-webkit-datetime-edit-year-field{padding:0}::-webkit-inner-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-color-swatch-wrapper{padding:0}::file-selector-button{font:inherit;-webkit-appearance:button}output{display:inline-block}iframe{border:0}summary{display:list-item;cursor:pointer}progress{vertical-align:baseline}[hidden]{display:none !important}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:calc(1.625rem + 4.5vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-1{font-size:5rem}}.display-2{font-size:calc(1.575rem + 3.9vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-2{font-size:4.5rem}}.display-3{font-size:calc(1.525rem + 3.3vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-3{font-size:4rem}}.display-4{font-size:calc(1.475rem + 2.7vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-4{font-size:3.5rem}}.display-5{font-size:calc(1.425rem + 2.1vw);font-weight:300;line-height:1.2}@media(min-width: 1200px){.display-5{font-size:3rem}}.display-6{font-size:calc(1.375rem + 1.5vw);font-weight:300;line-height:1.2}@media(min-width: 
1200px){.display-6{font-size:2.5rem}}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:0.875em;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote>:last-child{margin-bottom:0}.blockquote-footer{margin-top:-1rem;margin-bottom:1rem;font-size:0.875em;color:#6c757d}.blockquote-footer::before{content:"— "}.img-fluid{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:0.875em;color:rgba(52,58,64,.75)}.container,.container-fluid,.container-xxl,.container-xl,.container-lg,.container-md,.container-sm{--bs-gutter-x: 1.5rem;--bs-gutter-y: 0;width:100%;padding-right:calc(var(--bs-gutter-x)*.5);padding-left:calc(var(--bs-gutter-x)*.5);margin-right:auto;margin-left:auto}@media(min-width: 576px){.container-sm,.container{max-width:540px}}@media(min-width: 768px){.container-md,.container-sm,.container{max-width:720px}}@media(min-width: 992px){.container-lg,.container-md,.container-sm,.container{max-width:960px}}@media(min-width: 1200px){.container-xl,.container-lg,.container-md,.container-sm,.container{max-width:1140px}}@media(min-width: 1400px){.container-xxl,.container-xl,.container-lg,.container-md,.container-sm,.container{max-width:1320px}}:root{--bs-breakpoint-xs: 0;--bs-breakpoint-sm: 576px;--bs-breakpoint-md: 768px;--bs-breakpoint-lg: 992px;--bs-breakpoint-xl: 1200px;--bs-breakpoint-xxl: 1400px}.grid{display:grid;grid-template-rows:repeat(var(--bs-rows, 1), 1fr);grid-template-columns:repeat(var(--bs-columns, 12), 1fr);gap:var(--bs-gap, 1.5rem)}.grid .g-col-1{grid-column:auto/span 1}.grid .g-col-2{grid-column:auto/span 2}.grid .g-col-3{grid-column:auto/span 3}.grid .g-col-4{grid-column:auto/span 4}.grid 
.g-col-5{grid-column:auto/span 5}.grid .g-col-6{grid-column:auto/span 6}.grid .g-col-7{grid-column:auto/span 7}.grid .g-col-8{grid-column:auto/span 8}.grid .g-col-9{grid-column:auto/span 9}.grid .g-col-10{grid-column:auto/span 10}.grid .g-col-11{grid-column:auto/span 11}.grid .g-col-12{grid-column:auto/span 12}.grid .g-start-1{grid-column-start:1}.grid .g-start-2{grid-column-start:2}.grid .g-start-3{grid-column-start:3}.grid .g-start-4{grid-column-start:4}.grid .g-start-5{grid-column-start:5}.grid .g-start-6{grid-column-start:6}.grid .g-start-7{grid-column-start:7}.grid .g-start-8{grid-column-start:8}.grid .g-start-9{grid-column-start:9}.grid .g-start-10{grid-column-start:10}.grid .g-start-11{grid-column-start:11}@media(min-width: 576px){.grid .g-col-sm-1{grid-column:auto/span 1}.grid .g-col-sm-2{grid-column:auto/span 2}.grid .g-col-sm-3{grid-column:auto/span 3}.grid .g-col-sm-4{grid-column:auto/span 4}.grid .g-col-sm-5{grid-column:auto/span 5}.grid .g-col-sm-6{grid-column:auto/span 6}.grid .g-col-sm-7{grid-column:auto/span 7}.grid .g-col-sm-8{grid-column:auto/span 8}.grid .g-col-sm-9{grid-column:auto/span 9}.grid .g-col-sm-10{grid-column:auto/span 10}.grid .g-col-sm-11{grid-column:auto/span 11}.grid .g-col-sm-12{grid-column:auto/span 12}.grid .g-start-sm-1{grid-column-start:1}.grid .g-start-sm-2{grid-column-start:2}.grid .g-start-sm-3{grid-column-start:3}.grid .g-start-sm-4{grid-column-start:4}.grid .g-start-sm-5{grid-column-start:5}.grid .g-start-sm-6{grid-column-start:6}.grid .g-start-sm-7{grid-column-start:7}.grid .g-start-sm-8{grid-column-start:8}.grid .g-start-sm-9{grid-column-start:9}.grid .g-start-sm-10{grid-column-start:10}.grid .g-start-sm-11{grid-column-start:11}}@media(min-width: 768px){.grid .g-col-md-1{grid-column:auto/span 1}.grid .g-col-md-2{grid-column:auto/span 2}.grid .g-col-md-3{grid-column:auto/span 3}.grid .g-col-md-4{grid-column:auto/span 4}.grid .g-col-md-5{grid-column:auto/span 5}.grid .g-col-md-6{grid-column:auto/span 6}.grid 
.g-col-md-7{grid-column:auto/span 7}.grid .g-col-md-8{grid-column:auto/span 8}.grid .g-col-md-9{grid-column:auto/span 9}.grid .g-col-md-10{grid-column:auto/span 10}.grid .g-col-md-11{grid-column:auto/span 11}.grid .g-col-md-12{grid-column:auto/span 12}.grid .g-start-md-1{grid-column-start:1}.grid .g-start-md-2{grid-column-start:2}.grid .g-start-md-3{grid-column-start:3}.grid .g-start-md-4{grid-column-start:4}.grid .g-start-md-5{grid-column-start:5}.grid .g-start-md-6{grid-column-start:6}.grid .g-start-md-7{grid-column-start:7}.grid .g-start-md-8{grid-column-start:8}.grid .g-start-md-9{grid-column-start:9}.grid .g-start-md-10{grid-column-start:10}.grid .g-start-md-11{grid-column-start:11}}@media(min-width: 992px){.grid .g-col-lg-1{grid-column:auto/span 1}.grid .g-col-lg-2{grid-column:auto/span 2}.grid .g-col-lg-3{grid-column:auto/span 3}.grid .g-col-lg-4{grid-column:auto/span 4}.grid .g-col-lg-5{grid-column:auto/span 5}.grid .g-col-lg-6{grid-column:auto/span 6}.grid .g-col-lg-7{grid-column:auto/span 7}.grid .g-col-lg-8{grid-column:auto/span 8}.grid .g-col-lg-9{grid-column:auto/span 9}.grid .g-col-lg-10{grid-column:auto/span 10}.grid .g-col-lg-11{grid-column:auto/span 11}.grid .g-col-lg-12{grid-column:auto/span 12}.grid .g-start-lg-1{grid-column-start:1}.grid .g-start-lg-2{grid-column-start:2}.grid .g-start-lg-3{grid-column-start:3}.grid .g-start-lg-4{grid-column-start:4}.grid .g-start-lg-5{grid-column-start:5}.grid .g-start-lg-6{grid-column-start:6}.grid .g-start-lg-7{grid-column-start:7}.grid .g-start-lg-8{grid-column-start:8}.grid .g-start-lg-9{grid-column-start:9}.grid .g-start-lg-10{grid-column-start:10}.grid .g-start-lg-11{grid-column-start:11}}@media(min-width: 1200px){.grid .g-col-xl-1{grid-column:auto/span 1}.grid .g-col-xl-2{grid-column:auto/span 2}.grid .g-col-xl-3{grid-column:auto/span 3}.grid .g-col-xl-4{grid-column:auto/span 4}.grid .g-col-xl-5{grid-column:auto/span 5}.grid .g-col-xl-6{grid-column:auto/span 6}.grid .g-col-xl-7{grid-column:auto/span 
7}.grid .g-col-xl-8{grid-column:auto/span 8}.grid .g-col-xl-9{grid-column:auto/span 9}.grid .g-col-xl-10{grid-column:auto/span 10}.grid .g-col-xl-11{grid-column:auto/span 11}.grid .g-col-xl-12{grid-column:auto/span 12}.grid .g-start-xl-1{grid-column-start:1}.grid .g-start-xl-2{grid-column-start:2}.grid .g-start-xl-3{grid-column-start:3}.grid .g-start-xl-4{grid-column-start:4}.grid .g-start-xl-5{grid-column-start:5}.grid .g-start-xl-6{grid-column-start:6}.grid .g-start-xl-7{grid-column-start:7}.grid .g-start-xl-8{grid-column-start:8}.grid .g-start-xl-9{grid-column-start:9}.grid .g-start-xl-10{grid-column-start:10}.grid .g-start-xl-11{grid-column-start:11}}@media(min-width: 1400px){.grid .g-col-xxl-1{grid-column:auto/span 1}.grid .g-col-xxl-2{grid-column:auto/span 2}.grid .g-col-xxl-3{grid-column:auto/span 3}.grid .g-col-xxl-4{grid-column:auto/span 4}.grid .g-col-xxl-5{grid-column:auto/span 5}.grid .g-col-xxl-6{grid-column:auto/span 6}.grid .g-col-xxl-7{grid-column:auto/span 7}.grid .g-col-xxl-8{grid-column:auto/span 8}.grid .g-col-xxl-9{grid-column:auto/span 9}.grid .g-col-xxl-10{grid-column:auto/span 10}.grid .g-col-xxl-11{grid-column:auto/span 11}.grid .g-col-xxl-12{grid-column:auto/span 12}.grid .g-start-xxl-1{grid-column-start:1}.grid .g-start-xxl-2{grid-column-start:2}.grid .g-start-xxl-3{grid-column-start:3}.grid .g-start-xxl-4{grid-column-start:4}.grid .g-start-xxl-5{grid-column-start:5}.grid .g-start-xxl-6{grid-column-start:6}.grid .g-start-xxl-7{grid-column-start:7}.grid .g-start-xxl-8{grid-column-start:8}.grid .g-start-xxl-9{grid-column-start:9}.grid .g-start-xxl-10{grid-column-start:10}.grid .g-start-xxl-11{grid-column-start:11}}.table{--bs-table-color-type: initial;--bs-table-bg-type: initial;--bs-table-color-state: initial;--bs-table-bg-state: initial;--bs-table-color: #343a40;--bs-table-bg: #fff;--bs-table-border-color: #dee2e6;--bs-table-accent-bg: transparent;--bs-table-striped-color: #343a40;--bs-table-striped-bg: rgba(0, 0, 0, 
0.05);--bs-table-active-color: #343a40;--bs-table-active-bg: rgba(0, 0, 0, 0.1);--bs-table-hover-color: #343a40;--bs-table-hover-bg: rgba(0, 0, 0, 0.075);width:100%;margin-bottom:1rem;vertical-align:top;border-color:var(--bs-table-border-color)}.table>:not(caption)>*>*{padding:.5rem .5rem;color:var(--bs-table-color-state, var(--bs-table-color-type, var(--bs-table-color)));background-color:var(--bs-table-bg);border-bottom-width:1px;box-shadow:inset 0 0 0 9999px var(--bs-table-bg-state, var(--bs-table-bg-type, var(--bs-table-accent-bg)))}.table>tbody{vertical-align:inherit}.table>thead{vertical-align:bottom}.table-group-divider{border-top:calc(1px*2) solid #b2bac1}.caption-top{caption-side:top}.table-sm>:not(caption)>*>*{padding:.25rem .25rem}.table-bordered>:not(caption)>*{border-width:1px 0}.table-bordered>:not(caption)>*>*{border-width:0 1px}.table-borderless>:not(caption)>*>*{border-bottom-width:0}.table-borderless>:not(:first-child){border-top-width:0}.table-striped>tbody>tr:nth-of-type(odd)>*{--bs-table-color-type: var(--bs-table-striped-color);--bs-table-bg-type: var(--bs-table-striped-bg)}.table-striped-columns>:not(caption)>tr>:nth-child(even){--bs-table-color-type: var(--bs-table-striped-color);--bs-table-bg-type: var(--bs-table-striped-bg)}.table-active{--bs-table-color-state: var(--bs-table-active-color);--bs-table-bg-state: var(--bs-table-active-bg)}.table-hover>tbody>tr:hover>*{--bs-table-color-state: var(--bs-table-hover-color);--bs-table-bg-state: var(--bs-table-hover-bg)}.table-primary{--bs-table-color: #000;--bs-table-bg: #ded4e6;--bs-table-border-color: #c8bfcf;--bs-table-striped-bg: #d3c9db;--bs-table-striped-color: #000;--bs-table-active-bg: #c8bfcf;--bs-table-active-color: #000;--bs-table-hover-bg: #cdc4d5;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-secondary{--bs-table-color: #000;--bs-table-bg: #d6d8d9;--bs-table-border-color: #c1c2c3;--bs-table-striped-bg: 
#cbcdce;--bs-table-striped-color: #000;--bs-table-active-bg: #c1c2c3;--bs-table-active-color: #000;--bs-table-hover-bg: #c6c8c9;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-success{--bs-table-color: #000;--bs-table-bg: #d9f0d1;--bs-table-border-color: #c3d8bc;--bs-table-striped-bg: #cee4c7;--bs-table-striped-color: #000;--bs-table-active-bg: #c3d8bc;--bs-table-active-color: #000;--bs-table-hover-bg: #c9dec1;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-info{--bs-table-color: #000;--bs-table-bg: #ebddf1;--bs-table-border-color: #d4c7d9;--bs-table-striped-bg: #dfd2e5;--bs-table-striped-color: #000;--bs-table-active-bg: #d4c7d9;--bs-table-active-color: #000;--bs-table-hover-bg: #d9ccdf;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-warning{--bs-table-color: #000;--bs-table-bg: #ffe3d1;--bs-table-border-color: #e6ccbc;--bs-table-striped-bg: #f2d8c7;--bs-table-striped-color: #000;--bs-table-active-bg: #e6ccbc;--bs-table-active-color: #000;--bs-table-hover-bg: #ecd2c1;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-danger{--bs-table-color: #000;--bs-table-bg: #ffccd7;--bs-table-border-color: #e6b8c2;--bs-table-striped-bg: #f2c2cc;--bs-table-striped-color: #000;--bs-table-active-bg: #e6b8c2;--bs-table-active-color: #000;--bs-table-hover-bg: #ecbdc7;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-light{--bs-table-color: #000;--bs-table-bg: #f8f9fa;--bs-table-border-color: #dfe0e1;--bs-table-striped-bg: #ecedee;--bs-table-striped-color: #000;--bs-table-active-bg: #dfe0e1;--bs-table-active-color: #000;--bs-table-hover-bg: #e5e6e7;--bs-table-hover-color: #000;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-dark{--bs-table-color: #fff;--bs-table-bg: 
#343a40;--bs-table-border-color: #484e53;--bs-table-striped-bg: #3e444a;--bs-table-striped-color: #fff;--bs-table-active-bg: #484e53;--bs-table-active-color: #fff;--bs-table-hover-bg: #43494e;--bs-table-hover-color: #fff;color:var(--bs-table-color);border-color:var(--bs-table-border-color)}.table-responsive{overflow-x:auto;-webkit-overflow-scrolling:touch}@media(max-width: 575.98px){.table-responsive-sm{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 767.98px){.table-responsive-md{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 991.98px){.table-responsive-lg{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1199.98px){.table-responsive-xl{overflow-x:auto;-webkit-overflow-scrolling:touch}}@media(max-width: 1399.98px){.table-responsive-xxl{overflow-x:auto;-webkit-overflow-scrolling:touch}}.form-label,.shiny-input-container .control-label{margin-bottom:.5rem}.col-form-label{padding-top:calc(0.375rem + 1px);padding-bottom:calc(0.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:calc(0.5rem + 1px);padding-bottom:calc(0.5rem + 1px);font-size:1.25rem}.col-form-label-sm{padding-top:calc(0.25rem + 1px);padding-bottom:calc(0.25rem + 1px);font-size:0.875rem}.form-text{margin-top:.25rem;font-size:0.875em;color:rgba(52,58,64,.75)}.form-control{display:block;width:100%;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#343a40;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:#fff;background-clip:padding-box;border:1px solid #dee2e6;border-radius:0;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: 
reduce){.form-control{transition:none}}.form-control[type=file]{overflow:hidden}.form-control[type=file]:not(:disabled):not([readonly]){cursor:pointer}.form-control:focus{color:#343a40;background-color:#fff;border-color:#ae94c1;outline:0;box-shadow:0 0 0 .25rem rgba(92,41,131,.25)}.form-control::-webkit-date-and-time-value{min-width:85px;height:1.5em;margin:0}.form-control::-webkit-datetime-edit{display:block;padding:0}.form-control::placeholder{color:rgba(52,58,64,.75);opacity:1}.form-control:disabled{background-color:#e9ecef;opacity:1}.form-control::file-selector-button{padding:.375rem .75rem;margin:-0.375rem -0.75rem;margin-inline-end:.75rem;color:#343a40;background-color:#f8f9fa;pointer-events:none;border-color:inherit;border-style:solid;border-width:0;border-inline-end-width:1px;border-radius:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-control::file-selector-button{transition:none}}.form-control:hover:not(:disabled):not([readonly])::file-selector-button{background-color:#e9ecef}.form-control-plaintext{display:block;width:100%;padding:.375rem 0;margin-bottom:0;line-height:1.5;color:#343a40;background-color:rgba(0,0,0,0);border:solid rgba(0,0,0,0);border-width:1px 0}.form-control-plaintext:focus{outline:0}.form-control-plaintext.form-control-sm,.form-control-plaintext.form-control-lg{padding-right:0;padding-left:0}.form-control-sm{min-height:calc(1.5em + 0.5rem + calc(1px * 2));padding:.25rem .5rem;font-size:0.875rem}.form-control-sm::file-selector-button{padding:.25rem .5rem;margin:-0.25rem -0.5rem;margin-inline-end:.5rem}.form-control-lg{min-height:calc(1.5em + 1rem + calc(1px * 2));padding:.5rem 1rem;font-size:1.25rem}.form-control-lg::file-selector-button{padding:.5rem 1rem;margin:-0.5rem -1rem;margin-inline-end:1rem}textarea.form-control{min-height:calc(1.5em + 0.75rem + calc(1px * 
2))}textarea.form-control-sm{min-height:calc(1.5em + 0.5rem + calc(1px * 2))}textarea.form-control-lg{min-height:calc(1.5em + 1rem + calc(1px * 2))}.form-control-color{width:3rem;height:calc(1.5em + 0.75rem + calc(1px * 2));padding:.375rem}.form-control-color:not(:disabled):not([readonly]){cursor:pointer}.form-control-color::-moz-color-swatch{border:0 !important}.form-control-color::-webkit-color-swatch{border:0 !important}.form-control-color.form-control-sm{height:calc(1.5em + 0.5rem + calc(1px * 2))}.form-control-color.form-control-lg{height:calc(1.5em + 1rem + calc(1px * 2))}.form-select{--bs-form-select-bg-img: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23343a40' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='m2 5 6 6 6-6'/%3e%3c/svg%3e");display:block;width:100%;padding:.375rem 2.25rem .375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#343a40;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:#fff;background-image:var(--bs-form-select-bg-img),var(--bs-form-select-bg-icon, none);background-repeat:no-repeat;background-position:right .75rem center;background-size:16px 12px;border:1px solid #dee2e6;border-radius:0;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-select{transition:none}}.form-select:focus{border-color:#ae94c1;outline:0;box-shadow:0 0 0 .25rem rgba(92,41,131,.25)}.form-select[multiple],.form-select[size]:not([size="1"]){padding-right:.75rem;background-image:none}.form-select:disabled{background-color:#e9ecef}.form-select:-moz-focusring{color:rgba(0,0,0,0);text-shadow:0 0 0 #343a40}.form-select-sm{padding-top:.25rem;padding-bottom:.25rem;padding-left:.5rem;font-size:0.875rem}.form-select-lg{padding-top:.5rem;padding-bottom:.5rem;padding-left:1rem;font-size:1.25rem}[data-bs-theme=dark] 
.form-select{--bs-form-select-bg-img: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16'%3e%3cpath fill='none' stroke='%23dee2e6' stroke-linecap='round' stroke-linejoin='round' stroke-width='2' d='m2 5 6 6 6-6'/%3e%3c/svg%3e")}.form-check,.shiny-input-container .checkbox,.shiny-input-container .radio{display:block;min-height:1.5rem;padding-left:0;margin-bottom:.125rem}.form-check .form-check-input,.form-check .shiny-input-container .checkbox input,.form-check .shiny-input-container .radio input,.shiny-input-container .checkbox .form-check-input,.shiny-input-container .checkbox .shiny-input-container .checkbox input,.shiny-input-container .checkbox .shiny-input-container .radio input,.shiny-input-container .radio .form-check-input,.shiny-input-container .radio .shiny-input-container .checkbox input,.shiny-input-container .radio .shiny-input-container .radio input{float:left;margin-left:0}.form-check-reverse{padding-right:0;padding-left:0;text-align:right}.form-check-reverse .form-check-input{float:right;margin-right:0;margin-left:0}.form-check-input,.shiny-input-container .checkbox input,.shiny-input-container .checkbox-inline input,.shiny-input-container .radio input,.shiny-input-container .radio-inline input{--bs-form-check-bg: #fff;width:1em;height:1em;margin-top:.25em;vertical-align:top;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:var(--bs-form-check-bg);background-image:var(--bs-form-check-bg-image);background-repeat:no-repeat;background-position:center;background-size:contain;border:1px solid #dee2e6;print-color-adjust:exact}.form-check-input[type=radio],.shiny-input-container .checkbox input[type=radio],.shiny-input-container .checkbox-inline input[type=radio],.shiny-input-container .radio input[type=radio],.shiny-input-container .radio-inline input[type=radio]{border-radius:50%}.form-check-input:active,.shiny-input-container .checkbox 
input:active,.shiny-input-container .checkbox-inline input:active,.shiny-input-container .radio input:active,.shiny-input-container .radio-inline input:active{filter:brightness(90%)}.form-check-input:focus,.shiny-input-container .checkbox input:focus,.shiny-input-container .checkbox-inline input:focus,.shiny-input-container .radio input:focus,.shiny-input-container .radio-inline input:focus{border-color:#ae94c1;outline:0;box-shadow:0 0 0 .25rem rgba(92,41,131,.25)}.form-check-input:checked,.shiny-input-container .checkbox input:checked,.shiny-input-container .checkbox-inline input:checked,.shiny-input-container .radio input:checked,.shiny-input-container .radio-inline input:checked{background-color:#5c2983;border-color:#5c2983}.form-check-input:checked[type=checkbox],.shiny-input-container .checkbox input:checked[type=checkbox],.shiny-input-container .checkbox-inline input:checked[type=checkbox],.shiny-input-container .radio input:checked[type=checkbox],.shiny-input-container .radio-inline input:checked[type=checkbox]{--bs-form-check-bg-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='m6 10 3 3 6-6'/%3e%3c/svg%3e")}.form-check-input:checked[type=radio],.shiny-input-container .checkbox input:checked[type=radio],.shiny-input-container .checkbox-inline input:checked[type=radio],.shiny-input-container .radio input:checked[type=radio],.shiny-input-container .radio-inline input:checked[type=radio]{--bs-form-check-bg-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='2' fill='%23fff'/%3e%3c/svg%3e")}.form-check-input[type=checkbox]:indeterminate,.shiny-input-container .checkbox input[type=checkbox]:indeterminate,.shiny-input-container .checkbox-inline input[type=checkbox]:indeterminate,.shiny-input-container .radio 
input[type=checkbox]:indeterminate,.shiny-input-container .radio-inline input[type=checkbox]:indeterminate{background-color:#5c2983;border-color:#5c2983;--bs-form-check-bg-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 20 20'%3e%3cpath fill='none' stroke='%23fff' stroke-linecap='round' stroke-linejoin='round' stroke-width='3' d='M6 10h8'/%3e%3c/svg%3e")}.form-check-input:disabled,.shiny-input-container .checkbox input:disabled,.shiny-input-container .checkbox-inline input:disabled,.shiny-input-container .radio input:disabled,.shiny-input-container .radio-inline input:disabled{pointer-events:none;filter:none;opacity:.5}.form-check-input[disabled]~.form-check-label,.form-check-input[disabled]~span,.form-check-input:disabled~.form-check-label,.form-check-input:disabled~span,.shiny-input-container .checkbox input[disabled]~.form-check-label,.shiny-input-container .checkbox input[disabled]~span,.shiny-input-container .checkbox input:disabled~.form-check-label,.shiny-input-container .checkbox input:disabled~span,.shiny-input-container .checkbox-inline input[disabled]~.form-check-label,.shiny-input-container .checkbox-inline input[disabled]~span,.shiny-input-container .checkbox-inline input:disabled~.form-check-label,.shiny-input-container .checkbox-inline input:disabled~span,.shiny-input-container .radio input[disabled]~.form-check-label,.shiny-input-container .radio input[disabled]~span,.shiny-input-container .radio input:disabled~.form-check-label,.shiny-input-container .radio input:disabled~span,.shiny-input-container .radio-inline input[disabled]~.form-check-label,.shiny-input-container .radio-inline input[disabled]~span,.shiny-input-container .radio-inline input:disabled~.form-check-label,.shiny-input-container .radio-inline input:disabled~span{cursor:default;opacity:.5}.form-check-label,.shiny-input-container .checkbox label,.shiny-input-container .checkbox-inline label,.shiny-input-container .radio 
label,.shiny-input-container .radio-inline label{cursor:pointer}.form-switch{padding-left:2.5em}.form-switch .form-check-input{--bs-form-switch-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%280, 0, 0, 0.25%29'/%3e%3c/svg%3e");width:2em;margin-left:-2.5em;background-image:var(--bs-form-switch-bg);background-position:left center;transition:background-position .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-switch .form-check-input{transition:none}}.form-switch .form-check-input:focus{--bs-form-switch-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23ae94c1'/%3e%3c/svg%3e")}.form-switch .form-check-input:checked{background-position:right center;--bs-form-switch-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='%23fff'/%3e%3c/svg%3e")}.form-switch.form-check-reverse{padding-right:2.5em;padding-left:0}.form-switch.form-check-reverse .form-check-input{margin-right:-2.5em;margin-left:0}.form-check-inline{display:inline-block;margin-right:1rem}.btn-check{position:absolute;clip:rect(0, 0, 0, 0);pointer-events:none}.btn-check[disabled]+.btn,.btn-check:disabled+.btn{pointer-events:none;filter:none;opacity:.65}[data-bs-theme=dark] .form-switch .form-check-input:not(:checked):not(:focus){--bs-form-switch-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='-4 -4 8 8'%3e%3ccircle r='3' fill='rgba%28255, 255, 255, 0.25%29'/%3e%3c/svg%3e")}.form-range{width:100%;height:1.5rem;padding:0;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:rgba(0,0,0,0)}.form-range:focus{outline:0}.form-range:focus::-webkit-slider-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem rgba(92,41,131,.25)}.form-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .25rem 
rgba(92,41,131,.25)}.form-range::-moz-focus-outer{border:0}.form-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-0.25rem;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:#5c2983;border:0;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-range::-webkit-slider-thumb{transition:none}}.form-range::-webkit-slider-thumb:active{background-color:#cebfda}.form-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:rgba(0,0,0,0);cursor:pointer;background-color:#f8f9fa;border-color:rgba(0,0,0,0)}.form-range::-moz-range-thumb{width:1rem;height:1rem;appearance:none;-webkit-appearance:none;-moz-appearance:none;-ms-appearance:none;-o-appearance:none;background-color:#5c2983;border:0;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.form-range::-moz-range-thumb{transition:none}}.form-range::-moz-range-thumb:active{background-color:#cebfda}.form-range::-moz-range-track{width:100%;height:.5rem;color:rgba(0,0,0,0);cursor:pointer;background-color:#f8f9fa;border-color:rgba(0,0,0,0)}.form-range:disabled{pointer-events:none}.form-range:disabled::-webkit-slider-thumb{background-color:rgba(52,58,64,.75)}.form-range:disabled::-moz-range-thumb{background-color:rgba(52,58,64,.75)}.form-floating{position:relative}.form-floating>.form-control,.form-floating>.form-control-plaintext,.form-floating>.form-select{height:calc(3.5rem + calc(1px * 2));min-height:calc(3.5rem + calc(1px * 2));line-height:1.25}.form-floating>label{position:absolute;top:0;left:0;z-index:2;height:100%;padding:1rem .75rem;overflow:hidden;text-align:start;text-overflow:ellipsis;white-space:nowrap;pointer-events:none;border:1px solid rgba(0,0,0,0);transform-origin:0 0;transition:opacity .1s ease-in-out,transform .1s 
ease-in-out}@media(prefers-reduced-motion: reduce){.form-floating>label{transition:none}}.form-floating>.form-control,.form-floating>.form-control-plaintext{padding:1rem .75rem}.form-floating>.form-control::placeholder,.form-floating>.form-control-plaintext::placeholder{color:rgba(0,0,0,0)}.form-floating>.form-control:focus,.form-floating>.form-control:not(:placeholder-shown),.form-floating>.form-control-plaintext:focus,.form-floating>.form-control-plaintext:not(:placeholder-shown){padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:-webkit-autofill,.form-floating>.form-control-plaintext:-webkit-autofill{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-select{padding-top:1.625rem;padding-bottom:.625rem}.form-floating>.form-control:focus~label,.form-floating>.form-control:not(:placeholder-shown)~label,.form-floating>.form-control-plaintext~label,.form-floating>.form-select~label{color:rgba(var(--bs-body-color-rgb), 0.65);transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.form-floating>.form-control:focus~label::after,.form-floating>.form-control:not(:placeholder-shown)~label::after,.form-floating>.form-control-plaintext~label::after,.form-floating>.form-select~label::after{position:absolute;inset:1rem .375rem;z-index:-1;height:1.5em;content:"";background-color:#fff}.form-floating>.form-control:-webkit-autofill~label{color:rgba(var(--bs-body-color-rgb), 0.65);transform:scale(0.85) translateY(-0.5rem) translateX(0.15rem)}.form-floating>.form-control-plaintext~label{border-width:1px 
0}.form-floating>:disabled~label,.form-floating>.form-control:disabled~label{color:#6c757d}.form-floating>:disabled~label::after,.form-floating>.form-control:disabled~label::after{background-color:#e9ecef}.input-group{position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:stretch;-webkit-align-items:stretch;width:100%}.input-group>.form-control,.input-group>.form-select,.input-group>.form-floating{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto;width:1%;min-width:0}.input-group>.form-control:focus,.input-group>.form-select:focus,.input-group>.form-floating:focus-within{z-index:5}.input-group .btn{position:relative;z-index:2}.input-group .btn:focus{z-index:5}.input-group-text{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:.375rem .75rem;font-size:1rem;font-weight:400;line-height:1.5;color:#343a40;text-align:center;white-space:nowrap;background-color:#f8f9fa;border:1px solid #dee2e6}.input-group-lg>.form-control,.input-group-lg>.form-select,.input-group-lg>.input-group-text,.input-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem}.input-group-sm>.form-control,.input-group-sm>.form-select,.input-group-sm>.input-group-text,.input-group-sm>.btn{padding:.25rem .5rem;font-size:0.875rem}.input-group-lg>.form-select,.input-group-sm>.form-select{padding-right:3rem}.input-group>:not(:first-child):not(.dropdown-menu):not(.valid-tooltip):not(.valid-feedback):not(.invalid-tooltip):not(.invalid-feedback){margin-left:calc(1px*-1)}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#3fb618}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:#3fb618}.was-validated :valid~.valid-feedback,.was-validated :valid~.valid-tooltip,.is-valid~.valid-feedback,.is-valid~.valid-tooltip{display:block}.was-validated 
.form-control:valid,.form-control.is-valid{border-color:#3fb618;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%233fb618' d='M2.3 6.73.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:valid:focus,.form-control.is-valid:focus{border-color:#3fb618;box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated textarea.form-control:valid,textarea.form-control.is-valid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 0.1875rem)}.was-validated .form-select:valid,.form-select.is-valid{border-color:#3fb618}.was-validated .form-select:valid:not([multiple]):not([size]),.was-validated .form-select:valid:not([multiple])[size="1"],.form-select.is-valid:not([multiple]):not([size]),.form-select.is-valid:not([multiple])[size="1"]{--bs-form-select-bg-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 8 8'%3e%3cpath fill='%233fb618' d='M2.3 6.73.6 4.53c-.4-1.04.46-1.4 1.1-.8l1.1 1.4 3.4-3.8c.6-.63 1.6-.27 1.2.7l-4 4.6c-.43.5-.8.4-1.1.1z'/%3e%3c/svg%3e");padding-right:4.125rem;background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:valid:focus,.form-select.is-valid:focus{border-color:#3fb618;box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated .form-control-color:valid,.form-control-color.is-valid{width:calc(3rem + calc(1.5em + 0.75rem))}.was-validated .form-check-input:valid,.form-check-input.is-valid{border-color:#3fb618}.was-validated .form-check-input:valid:checked,.form-check-input.is-valid:checked{background-color:#3fb618}.was-validated 
.form-check-input:valid:focus,.form-check-input.is-valid:focus{box-shadow:0 0 0 .25rem rgba(63,182,24,.25)}.was-validated .form-check-input:valid~.form-check-label,.form-check-input.is-valid~.form-check-label{color:#3fb618}.form-check-inline .form-check-input~.valid-feedback{margin-left:.5em}.was-validated .input-group>.form-control:not(:focus):valid,.input-group>.form-control:not(:focus).is-valid,.was-validated .input-group>.form-select:not(:focus):valid,.input-group>.form-select:not(:focus).is-valid,.was-validated .input-group>.form-floating:not(:focus-within):valid,.input-group>.form-floating:not(:focus-within).is-valid{z-index:3}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:0.875em;color:#ff0039}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:0.875rem;color:#fff;background-color:#ff0039}.was-validated :invalid~.invalid-feedback,.was-validated :invalid~.invalid-tooltip,.is-invalid~.invalid-feedback,.is-invalid~.invalid-tooltip{display:block}.was-validated .form-control:invalid,.form-control.is-invalid{border-color:#ff0039;padding-right:calc(1.5em + 0.75rem);background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23ff0039'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23ff0039' stroke='none'/%3e%3c/svg%3e");background-repeat:no-repeat;background-position:right calc(0.375em + 0.1875rem) center;background-size:calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-control:invalid:focus,.form-control.is-invalid:focus{border-color:#ff0039;box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated textarea.form-control:invalid,textarea.form-control.is-invalid{padding-right:calc(1.5em + 0.75rem);background-position:top calc(0.375em + 0.1875rem) right calc(0.375em + 
0.1875rem)}.was-validated .form-select:invalid,.form-select.is-invalid{border-color:#ff0039}.was-validated .form-select:invalid:not([multiple]):not([size]),.was-validated .form-select:invalid:not([multiple])[size="1"],.form-select.is-invalid:not([multiple]):not([size]),.form-select.is-invalid:not([multiple])[size="1"]{--bs-form-select-bg-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23ff0039'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23ff0039' stroke='none'/%3e%3c/svg%3e");padding-right:4.125rem;background-position:right .75rem center,center right 2.25rem;background-size:16px 12px,calc(0.75em + 0.375rem) calc(0.75em + 0.375rem)}.was-validated .form-select:invalid:focus,.form-select.is-invalid:focus{border-color:#ff0039;box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated .form-control-color:invalid,.form-control-color.is-invalid{width:calc(3rem + calc(1.5em + 0.75rem))}.was-validated .form-check-input:invalid,.form-check-input.is-invalid{border-color:#ff0039}.was-validated .form-check-input:invalid:checked,.form-check-input.is-invalid:checked{background-color:#ff0039}.was-validated .form-check-input:invalid:focus,.form-check-input.is-invalid:focus{box-shadow:0 0 0 .25rem rgba(255,0,57,.25)}.was-validated .form-check-input:invalid~.form-check-label,.form-check-input.is-invalid~.form-check-label{color:#ff0039}.form-check-inline .form-check-input~.invalid-feedback{margin-left:.5em}.was-validated .input-group>.form-control:not(:focus):invalid,.input-group>.form-control:not(:focus).is-invalid,.was-validated .input-group>.form-select:not(:focus):invalid,.input-group>.form-select:not(:focus).is-invalid,.was-validated .input-group>.form-floating:not(:focus-within):invalid,.input-group>.form-floating:not(:focus-within).is-invalid{z-index:4}.btn{--bs-btn-padding-x: 
0.75rem;--bs-btn-padding-y: 0.375rem;--bs-btn-font-family: ;--bs-btn-font-size:1rem;--bs-btn-font-weight: 400;--bs-btn-line-height: 1.5;--bs-btn-color: #343a40;--bs-btn-bg: transparent;--bs-btn-border-width: 1px;--bs-btn-border-color: transparent;--bs-btn-border-radius: 0.25rem;--bs-btn-hover-border-color: transparent;--bs-btn-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.15), 0 1px 1px rgba(0, 0, 0, 0.075);--bs-btn-disabled-opacity: 0.65;--bs-btn-focus-box-shadow: 0 0 0 0.25rem rgba(var(--bs-btn-focus-shadow-rgb), .5);display:inline-block;padding:var(--bs-btn-padding-y) var(--bs-btn-padding-x);font-family:var(--bs-btn-font-family);font-size:var(--bs-btn-font-size);font-weight:var(--bs-btn-font-weight);line-height:var(--bs-btn-line-height);color:var(--bs-btn-color);text-align:center;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;vertical-align:middle;cursor:pointer;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;border:var(--bs-btn-border-width) solid var(--bs-btn-border-color);background-color:var(--bs-btn-bg);transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: 
reduce){.btn{transition:none}}.btn:hover{color:var(--bs-btn-hover-color);background-color:var(--bs-btn-hover-bg);border-color:var(--bs-btn-hover-border-color)}.btn-check+.btn:hover{color:var(--bs-btn-color);background-color:var(--bs-btn-bg);border-color:var(--bs-btn-border-color)}.btn:focus-visible{color:var(--bs-btn-hover-color);background-color:var(--bs-btn-hover-bg);border-color:var(--bs-btn-hover-border-color);outline:0;box-shadow:var(--bs-btn-focus-box-shadow)}.btn-check:focus-visible+.btn{border-color:var(--bs-btn-hover-border-color);outline:0;box-shadow:var(--bs-btn-focus-box-shadow)}.btn-check:checked+.btn,:not(.btn-check)+.btn:active,.btn:first-child:active,.btn.active,.btn.show{color:var(--bs-btn-active-color);background-color:var(--bs-btn-active-bg);border-color:var(--bs-btn-active-border-color)}.btn-check:checked+.btn:focus-visible,:not(.btn-check)+.btn:active:focus-visible,.btn:first-child:active:focus-visible,.btn.active:focus-visible,.btn.show:focus-visible{box-shadow:var(--bs-btn-focus-box-shadow)}.btn:disabled,.btn.disabled,fieldset:disabled .btn{color:var(--bs-btn-disabled-color);pointer-events:none;background-color:var(--bs-btn-disabled-bg);border-color:var(--bs-btn-disabled-border-color);opacity:var(--bs-btn-disabled-opacity)}.btn-default{--bs-btn-color: #fff;--bs-btn-bg: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #2c3136;--bs-btn-hover-border-color: #2a2e33;--bs-btn-focus-shadow-rgb: 82, 88, 93;--bs-btn-active-color: #fff;--bs-btn-active-bg: #2a2e33;--bs-btn-active-border-color: #272c30;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #343a40;--bs-btn-disabled-border-color: #343a40}.btn-primary{--bs-btn-color: #fff;--bs-btn-bg: #5c2983;--bs-btn-border-color: #5c2983;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #4e236f;--bs-btn-hover-border-color: #4a2169;--bs-btn-focus-shadow-rgb: 116, 73, 150;--bs-btn-active-color: 
#fff;--bs-btn-active-bg: #4a2169;--bs-btn-active-border-color: #451f62;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #5c2983;--bs-btn-disabled-border-color: #5c2983}.btn-secondary{--bs-btn-color: #fff;--bs-btn-bg: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #2c3136;--bs-btn-hover-border-color: #2a2e33;--bs-btn-focus-shadow-rgb: 82, 88, 93;--bs-btn-active-color: #fff;--bs-btn-active-bg: #2a2e33;--bs-btn-active-border-color: #272c30;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #343a40;--bs-btn-disabled-border-color: #343a40}.btn-success{--bs-btn-color: #fff;--bs-btn-bg: #3fb618;--bs-btn-border-color: #3fb618;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #369b14;--bs-btn-hover-border-color: #329213;--bs-btn-focus-shadow-rgb: 92, 193, 59;--bs-btn-active-color: #fff;--bs-btn-active-bg: #329213;--bs-btn-active-border-color: #2f8912;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #3fb618;--bs-btn-disabled-border-color: #3fb618}.btn-info{--bs-btn-color: #fff;--bs-btn-bg: #9954bb;--bs-btn-border-color: #9954bb;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #82479f;--bs-btn-hover-border-color: #7a4396;--bs-btn-focus-shadow-rgb: 168, 110, 197;--bs-btn-active-color: #fff;--bs-btn-active-bg: #7a4396;--bs-btn-active-border-color: #733f8c;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #9954bb;--bs-btn-disabled-border-color: #9954bb}.btn-warning{--bs-btn-color: #fff;--bs-btn-bg: #ff7518;--bs-btn-border-color: #ff7518;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #d96314;--bs-btn-hover-border-color: #cc5e13;--bs-btn-focus-shadow-rgb: 255, 138, 59;--bs-btn-active-color: #fff;--bs-btn-active-bg: #cc5e13;--bs-btn-active-border-color: #bf5812;--bs-btn-active-shadow: inset 0 3px 5px 
rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #ff7518;--bs-btn-disabled-border-color: #ff7518}.btn-danger{--bs-btn-color: #fff;--bs-btn-bg: #ff0039;--bs-btn-border-color: #ff0039;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #d90030;--bs-btn-hover-border-color: #cc002e;--bs-btn-focus-shadow-rgb: 255, 38, 87;--bs-btn-active-color: #fff;--bs-btn-active-bg: #cc002e;--bs-btn-active-border-color: #bf002b;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #ff0039;--bs-btn-disabled-border-color: #ff0039}.btn-light{--bs-btn-color: #000;--bs-btn-bg: #f8f9fa;--bs-btn-border-color: #f8f9fa;--bs-btn-hover-color: #000;--bs-btn-hover-bg: #d3d4d5;--bs-btn-hover-border-color: #c6c7c8;--bs-btn-focus-shadow-rgb: 211, 212, 213;--bs-btn-active-color: #000;--bs-btn-active-bg: #c6c7c8;--bs-btn-active-border-color: #babbbc;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #000;--bs-btn-disabled-bg: #f8f9fa;--bs-btn-disabled-border-color: #f8f9fa}.btn-dark{--bs-btn-color: #fff;--bs-btn-bg: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #52585d;--bs-btn-hover-border-color: #484e53;--bs-btn-focus-shadow-rgb: 82, 88, 93;--bs-btn-active-color: #fff;--bs-btn-active-bg: #5d6166;--bs-btn-active-border-color: #484e53;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #343a40;--bs-btn-disabled-border-color: #343a40}.btn-outline-default{--bs-btn-color: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #343a40;--bs-btn-hover-border-color: #343a40;--bs-btn-focus-shadow-rgb: 52, 58, 64;--bs-btn-active-color: #fff;--bs-btn-active-bg: #343a40;--bs-btn-active-border-color: #343a40;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #343a40;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: 
#343a40;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-primary{--bs-btn-color: #5c2983;--bs-btn-border-color: #5c2983;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #5c2983;--bs-btn-hover-border-color: #5c2983;--bs-btn-focus-shadow-rgb: 92, 41, 131;--bs-btn-active-color: #fff;--bs-btn-active-bg: #5c2983;--bs-btn-active-border-color: #5c2983;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #5c2983;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #5c2983;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-secondary{--bs-btn-color: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #343a40;--bs-btn-hover-border-color: #343a40;--bs-btn-focus-shadow-rgb: 52, 58, 64;--bs-btn-active-color: #fff;--bs-btn-active-bg: #343a40;--bs-btn-active-border-color: #343a40;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #343a40;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #343a40;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-success{--bs-btn-color: #3fb618;--bs-btn-border-color: #3fb618;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #3fb618;--bs-btn-hover-border-color: #3fb618;--bs-btn-focus-shadow-rgb: 63, 182, 24;--bs-btn-active-color: #fff;--bs-btn-active-bg: #3fb618;--bs-btn-active-border-color: #3fb618;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #3fb618;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #3fb618;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-info{--bs-btn-color: #9954bb;--bs-btn-border-color: #9954bb;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #9954bb;--bs-btn-hover-border-color: #9954bb;--bs-btn-focus-shadow-rgb: 153, 84, 187;--bs-btn-active-color: #fff;--bs-btn-active-bg: #9954bb;--bs-btn-active-border-color: #9954bb;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: 
#9954bb;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #9954bb;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-warning{--bs-btn-color: #ff7518;--bs-btn-border-color: #ff7518;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #ff7518;--bs-btn-hover-border-color: #ff7518;--bs-btn-focus-shadow-rgb: 255, 117, 24;--bs-btn-active-color: #fff;--bs-btn-active-bg: #ff7518;--bs-btn-active-border-color: #ff7518;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #ff7518;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #ff7518;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-danger{--bs-btn-color: #ff0039;--bs-btn-border-color: #ff0039;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #ff0039;--bs-btn-hover-border-color: #ff0039;--bs-btn-focus-shadow-rgb: 255, 0, 57;--bs-btn-active-color: #fff;--bs-btn-active-bg: #ff0039;--bs-btn-active-border-color: #ff0039;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #ff0039;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #ff0039;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-light{--bs-btn-color: #f8f9fa;--bs-btn-border-color: #f8f9fa;--bs-btn-hover-color: #000;--bs-btn-hover-bg: #f8f9fa;--bs-btn-hover-border-color: #f8f9fa;--bs-btn-focus-shadow-rgb: 248, 249, 250;--bs-btn-active-color: #000;--bs-btn-active-bg: #f8f9fa;--bs-btn-active-border-color: #f8f9fa;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #f8f9fa;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #f8f9fa;--bs-btn-bg: transparent;--bs-gradient: none}.btn-outline-dark{--bs-btn-color: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #fff;--bs-btn-hover-bg: #343a40;--bs-btn-hover-border-color: #343a40;--bs-btn-focus-shadow-rgb: 52, 58, 64;--bs-btn-active-color: #fff;--bs-btn-active-bg: #343a40;--bs-btn-active-border-color: #343a40;--bs-btn-active-shadow: inset 0 3px 
5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #343a40;--bs-btn-disabled-bg: transparent;--bs-btn-disabled-border-color: #343a40;--bs-btn-bg: transparent;--bs-gradient: none}.btn-link{--bs-btn-font-weight: 400;--bs-btn-color: #40ba2f;--bs-btn-bg: transparent;--bs-btn-border-color: transparent;--bs-btn-hover-color: #339526;--bs-btn-hover-border-color: transparent;--bs-btn-active-color: #339526;--bs-btn-active-border-color: transparent;--bs-btn-disabled-color: #6c757d;--bs-btn-disabled-border-color: transparent;--bs-btn-box-shadow: 0 0 0 #000;--bs-btn-focus-shadow-rgb: 54, 158, 40;text-decoration:underline;-webkit-text-decoration:underline;-moz-text-decoration:underline;-ms-text-decoration:underline;-o-text-decoration:underline}.btn-link:focus-visible{color:var(--bs-btn-color)}.btn-link:hover{color:var(--bs-btn-hover-color)}.btn-lg,.btn-group-lg>.btn{--bs-btn-padding-y: 0.5rem;--bs-btn-padding-x: 1rem;--bs-btn-font-size:1.25rem;--bs-btn-border-radius: 0.5rem}.btn-sm,.btn-group-sm>.btn{--bs-btn-padding-y: 0.25rem;--bs-btn-padding-x: 0.5rem;--bs-btn-font-size:0.875rem;--bs-btn-border-radius: 0.2em}.fade{transition:opacity .15s linear}@media(prefers-reduced-motion: reduce){.fade{transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{height:0;overflow:hidden;transition:height .2s ease}@media(prefers-reduced-motion: reduce){.collapsing{transition:none}}.collapsing.collapse-horizontal{width:0;height:auto;transition:width .35s ease}@media(prefers-reduced-motion: reduce){.collapsing.collapse-horizontal{transition:none}}.dropup,.dropend,.dropdown,.dropstart,.dropup-center,.dropdown-center{position:relative}.dropdown-toggle{white-space:nowrap}.dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid rgba(0,0,0,0);border-bottom:0;border-left:.3em solid rgba(0,0,0,0)}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{--bs-dropdown-zindex: 
1000;--bs-dropdown-min-width: 10rem;--bs-dropdown-padding-x: 0;--bs-dropdown-padding-y: 0.5rem;--bs-dropdown-spacer: 0.125rem;--bs-dropdown-font-size:1rem;--bs-dropdown-color: #343a40;--bs-dropdown-bg: #fff;--bs-dropdown-border-color: rgba(0, 0, 0, 0.175);--bs-dropdown-border-radius: 0.25rem;--bs-dropdown-border-width: 1px;--bs-dropdown-inner-border-radius: calc(0.25rem - 1px);--bs-dropdown-divider-bg: rgba(0, 0, 0, 0.175);--bs-dropdown-divider-margin-y: 0.5rem;--bs-dropdown-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);--bs-dropdown-link-color: #343a40;--bs-dropdown-link-hover-color: #343a40;--bs-dropdown-link-hover-bg: #f8f9fa;--bs-dropdown-link-active-color: #fff;--bs-dropdown-link-active-bg: #5c2983;--bs-dropdown-link-disabled-color: rgba(52, 58, 64, 0.5);--bs-dropdown-item-padding-x: 1rem;--bs-dropdown-item-padding-y: 0.25rem;--bs-dropdown-header-color: #6c757d;--bs-dropdown-header-padding-x: 1rem;--bs-dropdown-header-padding-y: 0.5rem;position:absolute;z-index:var(--bs-dropdown-zindex);display:none;min-width:var(--bs-dropdown-min-width);padding:var(--bs-dropdown-padding-y) var(--bs-dropdown-padding-x);margin:0;font-size:var(--bs-dropdown-font-size);color:var(--bs-dropdown-color);text-align:left;list-style:none;background-color:var(--bs-dropdown-bg);background-clip:padding-box;border:var(--bs-dropdown-border-width) solid var(--bs-dropdown-border-color)}.dropdown-menu[data-bs-popper]{top:100%;left:0;margin-top:var(--bs-dropdown-spacer)}.dropdown-menu-start{--bs-position: start}.dropdown-menu-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-end{--bs-position: end}.dropdown-menu-end[data-bs-popper]{right:0;left:auto}@media(min-width: 576px){.dropdown-menu-sm-start{--bs-position: start}.dropdown-menu-sm-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-sm-end{--bs-position: end}.dropdown-menu-sm-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 768px){.dropdown-menu-md-start{--bs-position: 
start}.dropdown-menu-md-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-md-end{--bs-position: end}.dropdown-menu-md-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 992px){.dropdown-menu-lg-start{--bs-position: start}.dropdown-menu-lg-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-lg-end{--bs-position: end}.dropdown-menu-lg-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1200px){.dropdown-menu-xl-start{--bs-position: start}.dropdown-menu-xl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xl-end{--bs-position: end}.dropdown-menu-xl-end[data-bs-popper]{right:0;left:auto}}@media(min-width: 1400px){.dropdown-menu-xxl-start{--bs-position: start}.dropdown-menu-xxl-start[data-bs-popper]{right:auto;left:0}.dropdown-menu-xxl-end{--bs-position: end}.dropdown-menu-xxl-end[data-bs-popper]{right:0;left:auto}}.dropup .dropdown-menu[data-bs-popper]{top:auto;bottom:100%;margin-top:0;margin-bottom:var(--bs-dropdown-spacer)}.dropup .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid rgba(0,0,0,0);border-bottom:.3em solid;border-left:.3em solid rgba(0,0,0,0)}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-menu[data-bs-popper]{top:0;right:auto;left:100%;margin-top:0;margin-left:var(--bs-dropdown-spacer)}.dropend .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid rgba(0,0,0,0);border-right:0;border-bottom:.3em solid rgba(0,0,0,0);border-left:.3em solid}.dropend .dropdown-toggle:empty::after{margin-left:0}.dropend .dropdown-toggle::after{vertical-align:0}.dropstart .dropdown-menu[data-bs-popper]{top:0;right:100%;left:auto;margin-top:0;margin-right:var(--bs-dropdown-spacer)}.dropstart .dropdown-toggle::after{display:inline-block;margin-left:.255em;vertical-align:.255em;content:""}.dropstart .dropdown-toggle::after{display:none}.dropstart 
.dropdown-toggle::before{display:inline-block;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid rgba(0,0,0,0);border-right:.3em solid;border-bottom:.3em solid rgba(0,0,0,0)}.dropstart .dropdown-toggle:empty::after{margin-left:0}.dropstart .dropdown-toggle::before{vertical-align:0}.dropdown-divider{height:0;margin:var(--bs-dropdown-divider-margin-y) 0;overflow:hidden;border-top:1px solid var(--bs-dropdown-divider-bg);opacity:1}.dropdown-item{display:block;width:100%;padding:var(--bs-dropdown-item-padding-y) var(--bs-dropdown-item-padding-x);clear:both;font-weight:400;color:var(--bs-dropdown-link-color);text-align:inherit;text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap;background-color:rgba(0,0,0,0);border:0}.dropdown-item:hover,.dropdown-item:focus{color:var(--bs-dropdown-link-hover-color);background-color:var(--bs-dropdown-link-hover-bg)}.dropdown-item.active,.dropdown-item:active{color:var(--bs-dropdown-link-active-color);text-decoration:none;background-color:var(--bs-dropdown-link-active-bg)}.dropdown-item.disabled,.dropdown-item:disabled{color:var(--bs-dropdown-link-disabled-color);pointer-events:none;background-color:rgba(0,0,0,0)}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:var(--bs-dropdown-header-padding-y) var(--bs-dropdown-header-padding-x);margin-bottom:0;font-size:0.875rem;color:var(--bs-dropdown-header-color);white-space:nowrap}.dropdown-item-text{display:block;padding:var(--bs-dropdown-item-padding-y) var(--bs-dropdown-item-padding-x);color:var(--bs-dropdown-link-color)}.dropdown-menu-dark{--bs-dropdown-color: #dee2e6;--bs-dropdown-bg: #343a40;--bs-dropdown-border-color: rgba(0, 0, 0, 0.175);--bs-dropdown-box-shadow: ;--bs-dropdown-link-color: #dee2e6;--bs-dropdown-link-hover-color: #fff;--bs-dropdown-divider-bg: rgba(0, 0, 0, 0.175);--bs-dropdown-link-hover-bg: rgba(255, 255, 255, 
0.15);--bs-dropdown-link-active-color: #fff;--bs-dropdown-link-active-bg: #5c2983;--bs-dropdown-link-disabled-color: #adb5bd;--bs-dropdown-header-color: #adb5bd}.btn-group,.btn-group-vertical{position:relative;display:inline-flex;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto}.btn-group>.btn-check:checked+.btn,.btn-group>.btn-check:focus+.btn,.btn-group>.btn:hover,.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn-check:checked+.btn,.btn-group-vertical>.btn-check:focus+.btn,.btn-group-vertical>.btn:hover,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn.active{z-index:1}.btn-toolbar{display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;justify-content:flex-start;-webkit-justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>:not(.btn-check:first-child)+.btn,.btn-group>.btn-group:not(:first-child){margin-left:calc(1px*-1)}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after,.dropend .dropdown-toggle-split::after{margin-left:0}.dropstart .dropdown-toggle-split::before{margin-right:0}.btn-sm+.dropdown-toggle-split,.btn-group-sm>.btn+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-lg+.dropdown-toggle-split,.btn-group-lg>.btn+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group-vertical{flex-direction:column;-webkit-flex-direction:column;align-items:flex-start;-webkit-align-items:flex-start;justify-content:center;-webkit-justify-content:center}.btn-group-vertical>.btn,.btn-group-vertical>.btn-group{width:100%}.btn-group-vertical>.btn:not(:first-child),.btn-group-vertical>.btn-group:not(:first-child){margin-top:calc(1px*-1)}.nav{--bs-nav-link-padding-x: 1rem;--bs-nav-link-padding-y: 0.5rem;--bs-nav-link-font-weight: ;--bs-nav-link-color: 
#40ba2f;--bs-nav-link-hover-color: #339526;--bs-nav-link-disabled-color: rgba(52, 58, 64, 0.75);display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:var(--bs-nav-link-padding-y) var(--bs-nav-link-padding-x);font-size:var(--bs-nav-link-font-size);font-weight:var(--bs-nav-link-font-weight);color:var(--bs-nav-link-color);text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background:none;border:0;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out}@media(prefers-reduced-motion: reduce){.nav-link{transition:none}}.nav-link:hover,.nav-link:focus{color:var(--bs-nav-link-hover-color)}.nav-link:focus-visible{outline:0;box-shadow:0 0 0 .25rem rgba(92,41,131,.25)}.nav-link.disabled,.nav-link:disabled{color:var(--bs-nav-link-disabled-color);pointer-events:none;cursor:default}.nav-tabs{--bs-nav-tabs-border-width: 1px;--bs-nav-tabs-border-color: #dee2e6;--bs-nav-tabs-border-radius: 0.25rem;--bs-nav-tabs-link-hover-border-color: #e9ecef #e9ecef #dee2e6;--bs-nav-tabs-link-active-color: #000;--bs-nav-tabs-link-active-bg: #fff;--bs-nav-tabs-link-active-border-color: #dee2e6 #dee2e6 #fff;border-bottom:var(--bs-nav-tabs-border-width) solid var(--bs-nav-tabs-border-color)}.nav-tabs .nav-link{margin-bottom:calc(-1*var(--bs-nav-tabs-border-width));border:var(--bs-nav-tabs-border-width) solid rgba(0,0,0,0)}.nav-tabs .nav-link:hover,.nav-tabs .nav-link:focus{isolation:isolate;border-color:var(--bs-nav-tabs-link-hover-border-color)}.nav-tabs .nav-link.active,.nav-tabs .nav-item.show .nav-link{color:var(--bs-nav-tabs-link-active-color);background-color:var(--bs-nav-tabs-link-active-bg);border-color:var(--bs-nav-tabs-link-active-border-color)}.nav-tabs .dropdown-menu{margin-top:calc(-1*var(--bs-nav-tabs-border-width))}.nav-pills{--bs-nav-pills-border-radius: 
0.25rem;--bs-nav-pills-link-active-color: #fff;--bs-nav-pills-link-active-bg: #5c2983}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:var(--bs-nav-pills-link-active-color);background-color:var(--bs-nav-pills-link-active-bg)}.nav-underline{--bs-nav-underline-gap: 1rem;--bs-nav-underline-border-width: 0.125rem;--bs-nav-underline-link-active-color: #000;gap:var(--bs-nav-underline-gap)}.nav-underline .nav-link{padding-right:0;padding-left:0;border-bottom:var(--bs-nav-underline-border-width) solid rgba(0,0,0,0)}.nav-underline .nav-link:hover,.nav-underline .nav-link:focus{border-bottom-color:currentcolor}.nav-underline .nav-link.active,.nav-underline .show>.nav-link{font-weight:700;color:var(--bs-nav-underline-link-active-color);border-bottom-color:currentcolor}.nav-fill>.nav-link,.nav-fill .nav-item{flex:1 1 auto;-webkit-flex:1 1 auto;text-align:center}.nav-justified>.nav-link,.nav-justified .nav-item{flex-basis:0;-webkit-flex-basis:0;flex-grow:1;-webkit-flex-grow:1;text-align:center}.nav-fill .nav-item .nav-link,.nav-justified .nav-item .nav-link{width:100%}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{--bs-navbar-padding-x: 0;--bs-navbar-padding-y: 0.5rem;--bs-navbar-color: #e0d6e7;--bs-navbar-hover-color: rgba(182, 229, 176, 0.8);--bs-navbar-disabled-color: rgba(224, 214, 231, 0.75);--bs-navbar-active-color: #b6e5b0;--bs-navbar-brand-padding-y: 0.3125rem;--bs-navbar-brand-margin-end: 1rem;--bs-navbar-brand-font-size: 1.25rem;--bs-navbar-brand-color: #e0d6e7;--bs-navbar-brand-hover-color: #b6e5b0;--bs-navbar-nav-link-padding-x: 0.5rem;--bs-navbar-toggler-padding-y: 0.25;--bs-navbar-toggler-padding-x: 0;--bs-navbar-toggler-font-size: 1.25rem;--bs-navbar-toggler-icon-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23e0d6e7' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e");--bs-navbar-toggler-border-color: 
rgba(224, 214, 231, 0);--bs-navbar-toggler-border-radius: 0.25rem;--bs-navbar-toggler-focus-width: 0.25rem;--bs-navbar-toggler-transition: box-shadow 0.15s ease-in-out;position:relative;display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:var(--bs-navbar-padding-y) var(--bs-navbar-padding-x)}.navbar>.container,.navbar>.container-fluid,.navbar>.container-sm,.navbar>.container-md,.navbar>.container-lg,.navbar>.container-xl,.navbar>.container-xxl{display:flex;display:-webkit-flex;flex-wrap:inherit;-webkit-flex-wrap:inherit;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between}.navbar-brand{padding-top:var(--bs-navbar-brand-padding-y);padding-bottom:var(--bs-navbar-brand-padding-y);margin-right:var(--bs-navbar-brand-margin-end);font-size:var(--bs-navbar-brand-font-size);color:var(--bs-navbar-brand-color);text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;white-space:nowrap}.navbar-brand:hover,.navbar-brand:focus{color:var(--bs-navbar-brand-hover-color)}.navbar-nav{--bs-nav-link-padding-x: 0;--bs-nav-link-padding-y: 0.5rem;--bs-nav-link-font-weight: ;--bs-nav-link-color: var(--bs-navbar-color);--bs-nav-link-hover-color: var(--bs-navbar-hover-color);--bs-nav-link-disabled-color: var(--bs-navbar-disabled-color);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link.active,.navbar-nav .nav-link.show{color:var(--bs-navbar-active-color)}.navbar-nav .dropdown-menu{position:static}.navbar-text{padding-top:.5rem;padding-bottom:.5rem;color:var(--bs-navbar-color)}.navbar-text a,.navbar-text a:hover,.navbar-text 
a:focus{color:var(--bs-navbar-active-color)}.navbar-collapse{flex-basis:100%;-webkit-flex-basis:100%;flex-grow:1;-webkit-flex-grow:1;align-items:center;-webkit-align-items:center}.navbar-toggler{padding:var(--bs-navbar-toggler-padding-y) var(--bs-navbar-toggler-padding-x);font-size:var(--bs-navbar-toggler-font-size);line-height:1;color:var(--bs-navbar-color);background-color:rgba(0,0,0,0);border:var(--bs-border-width) solid var(--bs-navbar-toggler-border-color);transition:var(--bs-navbar-toggler-transition)}@media(prefers-reduced-motion: reduce){.navbar-toggler{transition:none}}.navbar-toggler:hover{text-decoration:none}.navbar-toggler:focus{text-decoration:none;outline:0;box-shadow:0 0 0 var(--bs-navbar-toggler-focus-width)}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;background-image:var(--bs-navbar-toggler-icon-bg);background-repeat:no-repeat;background-position:center;background-size:100%}.navbar-nav-scroll{max-height:var(--bs-scroll-height, 75vh);overflow-y:auto}@media(min-width: 576px){.navbar-expand-sm{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-sm .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand-sm .navbar-nav-scroll{overflow:visible}.navbar-expand-sm .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}.navbar-expand-sm .offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand-sm .offcanvas 
.offcanvas-header{display:none}.navbar-expand-sm .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 768px){.navbar-expand-md{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-md .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav .nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand-md .navbar-nav-scroll{overflow:visible}.navbar-expand-md .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}.navbar-expand-md .offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand-md .offcanvas .offcanvas-header{display:none}.navbar-expand-md .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 992px){.navbar-expand-lg{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-lg .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand-lg .navbar-nav-scroll{overflow:visible}.navbar-expand-lg .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}.navbar-expand-lg 
.offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand-lg .offcanvas .offcanvas-header{display:none}.navbar-expand-lg .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1200px){.navbar-expand-xl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand-xl .navbar-nav-scroll{overflow:visible}.navbar-expand-xl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}.navbar-expand-xl .offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand-xl .offcanvas .offcanvas-header{display:none}.navbar-expand-xl .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}@media(min-width: 1400px){.navbar-expand-xxl{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand-xxl .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand-xxl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xxl .navbar-nav 
.nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand-xxl .navbar-nav-scroll{overflow:visible}.navbar-expand-xxl .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand-xxl .navbar-toggler{display:none}.navbar-expand-xxl .offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand-xxl .offcanvas .offcanvas-header{display:none}.navbar-expand-xxl .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}}.navbar-expand{flex-wrap:nowrap;-webkit-flex-wrap:nowrap;justify-content:flex-start;-webkit-justify-content:flex-start}.navbar-expand .navbar-nav{flex-direction:row;-webkit-flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:var(--bs-navbar-nav-link-padding-x);padding-left:var(--bs-navbar-nav-link-padding-x)}.navbar-expand .navbar-nav-scroll{overflow:visible}.navbar-expand .navbar-collapse{display:flex !important;display:-webkit-flex !important;flex-basis:auto;-webkit-flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-expand .offcanvas{position:static;z-index:auto;flex-grow:1;-webkit-flex-grow:1;width:auto !important;height:auto !important;visibility:visible !important;background-color:rgba(0,0,0,0) !important;border:0 !important;transform:none !important;transition:none}.navbar-expand .offcanvas .offcanvas-header{display:none}.navbar-expand .offcanvas .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible}.navbar-dark,.navbar[data-bs-theme=dark]{--bs-navbar-color: #e0d6e7;--bs-navbar-hover-color: rgba(182, 229, 
176, 0.8);--bs-navbar-disabled-color: rgba(224, 214, 231, 0.75);--bs-navbar-active-color: #b6e5b0;--bs-navbar-brand-color: #e0d6e7;--bs-navbar-brand-hover-color: #b6e5b0;--bs-navbar-toggler-border-color: rgba(224, 214, 231, 0);--bs-navbar-toggler-icon-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23e0d6e7' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}[data-bs-theme=dark] .navbar-toggler-icon{--bs-navbar-toggler-icon-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 30 30'%3e%3cpath stroke='%23e0d6e7' stroke-linecap='round' stroke-miterlimit='10' stroke-width='2' d='M4 7h22M4 15h22M4 23h22'/%3e%3c/svg%3e")}.card{--bs-card-spacer-y: 1rem;--bs-card-spacer-x: 1rem;--bs-card-title-spacer-y: 0.5rem;--bs-card-title-color: ;--bs-card-subtitle-color: ;--bs-card-border-width: 1px;--bs-card-border-color: rgba(0, 0, 0, 0.175);--bs-card-border-radius: 0.25rem;--bs-card-box-shadow: ;--bs-card-inner-border-radius: calc(0.25rem - 1px);--bs-card-cap-padding-y: 0.5rem;--bs-card-cap-padding-x: 1rem;--bs-card-cap-bg: rgba(52, 58, 64, 0.25);--bs-card-cap-color: ;--bs-card-height: ;--bs-card-color: ;--bs-card-bg: #fff;--bs-card-img-overlay-padding: 1rem;--bs-card-group-margin: 0.75rem;position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;min-width:0;height:var(--bs-card-height);color:var(--bs-body-color);word-wrap:break-word;background-color:var(--bs-card-bg);background-clip:border-box;border:var(--bs-card-border-width) solid var(--bs-card-border-color)}.card>hr{margin-right:0;margin-left:0}.card>.list-group{border-top:inherit;border-bottom:inherit}.card>.list-group:first-child{border-top-width:0}.card>.list-group:last-child{border-bottom-width:0}.card>.card-header+.list-group,.card>.list-group+.card-footer{border-top:0}.card-body{flex:1 1 auto;-webkit-flex:1 1 
auto;padding:var(--bs-card-spacer-y) var(--bs-card-spacer-x);color:var(--bs-card-color)}.card-title{margin-bottom:var(--bs-card-title-spacer-y);color:var(--bs-card-title-color)}.card-subtitle{margin-top:calc(-0.5*var(--bs-card-title-spacer-y));margin-bottom:0;color:var(--bs-card-subtitle-color)}.card-text:last-child{margin-bottom:0}.card-link+.card-link{margin-left:var(--bs-card-spacer-x)}.card-header{padding:var(--bs-card-cap-padding-y) var(--bs-card-cap-padding-x);margin-bottom:0;color:var(--bs-card-cap-color);background-color:var(--bs-card-cap-bg);border-bottom:var(--bs-card-border-width) solid var(--bs-card-border-color)}.card-footer{padding:var(--bs-card-cap-padding-y) var(--bs-card-cap-padding-x);color:var(--bs-card-cap-color);background-color:var(--bs-card-cap-bg);border-top:var(--bs-card-border-width) solid var(--bs-card-border-color)}.card-header-tabs{margin-right:calc(-0.5*var(--bs-card-cap-padding-x));margin-bottom:calc(-1*var(--bs-card-cap-padding-y));margin-left:calc(-0.5*var(--bs-card-cap-padding-x));border-bottom:0}.card-header-tabs .nav-link.active{background-color:var(--bs-card-bg);border-bottom-color:var(--bs-card-bg)}.card-header-pills{margin-right:calc(-0.5*var(--bs-card-cap-padding-x));margin-left:calc(-0.5*var(--bs-card-cap-padding-x))}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:var(--bs-card-img-overlay-padding)}.card-img,.card-img-top,.card-img-bottom{width:100%}.card-group>.card{margin-bottom:var(--bs-card-group-margin)}@media(min-width: 576px){.card-group{display:flex;display:-webkit-flex;flex-flow:row wrap;-webkit-flex-flow:row wrap}.card-group>.card{flex:1 0 0%;-webkit-flex:1 0 0%;margin-bottom:0}.card-group>.card+.card{margin-left:0;border-left:0}}.accordion{--bs-accordion-color: #343a40;--bs-accordion-bg: #fff;--bs-accordion-transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out, border-radius 0.15s 
ease;--bs-accordion-border-color: #dee2e6;--bs-accordion-border-width: 1px;--bs-accordion-border-radius: 0.25rem;--bs-accordion-inner-border-radius: calc(0.25rem - 1px);--bs-accordion-btn-padding-x: 1.25rem;--bs-accordion-btn-padding-y: 1rem;--bs-accordion-btn-color: #343a40;--bs-accordion-btn-bg: #fff;--bs-accordion-btn-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23343a40'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");--bs-accordion-btn-icon-width: 1.25rem;--bs-accordion-btn-icon-transform: rotate(-180deg);--bs-accordion-btn-icon-transition: transform 0.2s ease-in-out;--bs-accordion-btn-active-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23251034'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");--bs-accordion-btn-focus-border-color: #ae94c1;--bs-accordion-btn-focus-box-shadow: 0 0 0 0.25rem rgba(92, 41, 131, 0.25);--bs-accordion-body-padding-x: 1.25rem;--bs-accordion-body-padding-y: 1rem;--bs-accordion-active-color: #251034;--bs-accordion-active-bg: #ded4e6}.accordion-button{position:relative;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;width:100%;padding:var(--bs-accordion-btn-padding-y) var(--bs-accordion-btn-padding-x);font-size:1rem;color:var(--bs-accordion-btn-color);text-align:left;background-color:var(--bs-accordion-btn-bg);border:0;overflow-anchor:none;transition:var(--bs-accordion-transition)}@media(prefers-reduced-motion: reduce){.accordion-button{transition:none}}.accordion-button:not(.collapsed){color:var(--bs-accordion-active-color);background-color:var(--bs-accordion-active-bg);box-shadow:inset 0 calc(-1*var(--bs-accordion-border-width)) 0 
var(--bs-accordion-border-color)}.accordion-button:not(.collapsed)::after{background-image:var(--bs-accordion-btn-active-icon);transform:var(--bs-accordion-btn-icon-transform)}.accordion-button::after{flex-shrink:0;-webkit-flex-shrink:0;width:var(--bs-accordion-btn-icon-width);height:var(--bs-accordion-btn-icon-width);margin-left:auto;content:"";background-image:var(--bs-accordion-btn-icon);background-repeat:no-repeat;background-size:var(--bs-accordion-btn-icon-width);transition:var(--bs-accordion-btn-icon-transition)}@media(prefers-reduced-motion: reduce){.accordion-button::after{transition:none}}.accordion-button:hover{z-index:2}.accordion-button:focus{z-index:3;border-color:var(--bs-accordion-btn-focus-border-color);outline:0;box-shadow:var(--bs-accordion-btn-focus-box-shadow)}.accordion-header{margin-bottom:0}.accordion-item{color:var(--bs-accordion-color);background-color:var(--bs-accordion-bg);border:var(--bs-accordion-border-width) solid var(--bs-accordion-border-color)}.accordion-item:not(:first-of-type){border-top:0}.accordion-body{padding:var(--bs-accordion-body-padding-y) var(--bs-accordion-body-padding-x)}.accordion-flush .accordion-collapse{border-width:0}.accordion-flush .accordion-item{border-right:0;border-left:0}.accordion-flush .accordion-item:first-child{border-top:0}.accordion-flush .accordion-item:last-child{border-bottom:0}[data-bs-theme=dark] .accordion-button::after{--bs-accordion-btn-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%239d7fb5'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");--bs-accordion-btn-active-icon: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%239d7fb5'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 
0-.708z'/%3e%3c/svg%3e")}.breadcrumb{--bs-breadcrumb-padding-x: 0;--bs-breadcrumb-padding-y: 0;--bs-breadcrumb-margin-bottom: 1rem;--bs-breadcrumb-bg: ;--bs-breadcrumb-border-radius: ;--bs-breadcrumb-divider-color: rgba(52, 58, 64, 0.75);--bs-breadcrumb-item-padding-x: 0.5rem;--bs-breadcrumb-item-active-color: rgba(52, 58, 64, 0.75);display:flex;display:-webkit-flex;flex-wrap:wrap;-webkit-flex-wrap:wrap;padding:var(--bs-breadcrumb-padding-y) var(--bs-breadcrumb-padding-x);margin-bottom:var(--bs-breadcrumb-margin-bottom);font-size:var(--bs-breadcrumb-font-size);list-style:none;background-color:var(--bs-breadcrumb-bg)}.breadcrumb-item+.breadcrumb-item{padding-left:var(--bs-breadcrumb-item-padding-x)}.breadcrumb-item+.breadcrumb-item::before{float:left;padding-right:var(--bs-breadcrumb-item-padding-x);color:var(--bs-breadcrumb-divider-color);content:var(--bs-breadcrumb-divider, ">") /* rtl: var(--bs-breadcrumb-divider, ">") */}.breadcrumb-item.active{color:var(--bs-breadcrumb-item-active-color)}.pagination{--bs-pagination-padding-x: 0.75rem;--bs-pagination-padding-y: 0.375rem;--bs-pagination-font-size:1rem;--bs-pagination-color: #40ba2f;--bs-pagination-bg: #fff;--bs-pagination-border-width: 1px;--bs-pagination-border-color: #dee2e6;--bs-pagination-border-radius: 0.25rem;--bs-pagination-hover-color: #339526;--bs-pagination-hover-bg: #f8f9fa;--bs-pagination-hover-border-color: #dee2e6;--bs-pagination-focus-color: #339526;--bs-pagination-focus-bg: #e9ecef;--bs-pagination-focus-box-shadow: 0 0 0 0.25rem rgba(92, 41, 131, 0.25);--bs-pagination-active-color: #fff;--bs-pagination-active-bg: #5c2983;--bs-pagination-active-border-color: #5c2983;--bs-pagination-disabled-color: rgba(52, 58, 64, 0.75);--bs-pagination-disabled-bg: #e9ecef;--bs-pagination-disabled-border-color: #dee2e6;display:flex;display:-webkit-flex;padding-left:0;list-style:none}.page-link{position:relative;display:block;padding:var(--bs-pagination-padding-y) 
var(--bs-pagination-padding-x);font-size:var(--bs-pagination-font-size);color:var(--bs-pagination-color);text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:var(--bs-pagination-bg);border:var(--bs-pagination-border-width) solid var(--bs-pagination-border-color);transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out}@media(prefers-reduced-motion: reduce){.page-link{transition:none}}.page-link:hover{z-index:2;color:var(--bs-pagination-hover-color);background-color:var(--bs-pagination-hover-bg);border-color:var(--bs-pagination-hover-border-color)}.page-link:focus{z-index:3;color:var(--bs-pagination-focus-color);background-color:var(--bs-pagination-focus-bg);outline:0;box-shadow:var(--bs-pagination-focus-box-shadow)}.page-link.active,.active>.page-link{z-index:3;color:var(--bs-pagination-active-color);background-color:var(--bs-pagination-active-bg);border-color:var(--bs-pagination-active-border-color)}.page-link.disabled,.disabled>.page-link{color:var(--bs-pagination-disabled-color);pointer-events:none;background-color:var(--bs-pagination-disabled-bg);border-color:var(--bs-pagination-disabled-border-color)}.page-item:not(:first-child) .page-link{margin-left:calc(1px*-1)}.pagination-lg{--bs-pagination-padding-x: 1.5rem;--bs-pagination-padding-y: 0.75rem;--bs-pagination-font-size:1.25rem;--bs-pagination-border-radius: 0.5rem}.pagination-sm{--bs-pagination-padding-x: 0.5rem;--bs-pagination-padding-y: 0.25rem;--bs-pagination-font-size:0.875rem;--bs-pagination-border-radius: 0.2em}.badge{--bs-badge-padding-x: 0.65em;--bs-badge-padding-y: 0.35em;--bs-badge-font-size:0.75em;--bs-badge-font-weight: 700;--bs-badge-color: #fff;--bs-badge-border-radius: 0.25rem;display:inline-block;padding:var(--bs-badge-padding-y) 
var(--bs-badge-padding-x);font-size:var(--bs-badge-font-size);font-weight:var(--bs-badge-font-weight);line-height:1;color:var(--bs-badge-color);text-align:center;white-space:nowrap;vertical-align:baseline}.badge:empty{display:none}.btn .badge{position:relative;top:-1px}.alert{--bs-alert-bg: transparent;--bs-alert-padding-x: 1rem;--bs-alert-padding-y: 1rem;--bs-alert-margin-bottom: 1rem;--bs-alert-color: inherit;--bs-alert-border-color: transparent;--bs-alert-border: 0 solid var(--bs-alert-border-color);--bs-alert-border-radius: 0.25rem;--bs-alert-link-color: inherit;position:relative;padding:var(--bs-alert-padding-y) var(--bs-alert-padding-x);margin-bottom:var(--bs-alert-margin-bottom);color:var(--bs-alert-color);background-color:var(--bs-alert-bg);border:var(--bs-alert-border)}.alert-heading{color:inherit}.alert-link{font-weight:700;color:var(--bs-alert-link-color)}.alert-dismissible{padding-right:3rem}.alert-dismissible .btn-close{position:absolute;top:0;right:0;z-index:2;padding:1.25rem 1rem}.alert-default{--bs-alert-color: var(--bs-default-text-emphasis);--bs-alert-bg: var(--bs-default-bg-subtle);--bs-alert-border-color: var(--bs-default-border-subtle);--bs-alert-link-color: var(--bs-default-text-emphasis)}.alert-primary{--bs-alert-color: var(--bs-primary-text-emphasis);--bs-alert-bg: var(--bs-primary-bg-subtle);--bs-alert-border-color: var(--bs-primary-border-subtle);--bs-alert-link-color: var(--bs-primary-text-emphasis)}.alert-secondary{--bs-alert-color: var(--bs-secondary-text-emphasis);--bs-alert-bg: var(--bs-secondary-bg-subtle);--bs-alert-border-color: var(--bs-secondary-border-subtle);--bs-alert-link-color: var(--bs-secondary-text-emphasis)}.alert-success{--bs-alert-color: var(--bs-success-text-emphasis);--bs-alert-bg: var(--bs-success-bg-subtle);--bs-alert-border-color: var(--bs-success-border-subtle);--bs-alert-link-color: var(--bs-success-text-emphasis)}.alert-info{--bs-alert-color: var(--bs-info-text-emphasis);--bs-alert-bg: 
var(--bs-info-bg-subtle);--bs-alert-border-color: var(--bs-info-border-subtle);--bs-alert-link-color: var(--bs-info-text-emphasis)}.alert-warning{--bs-alert-color: var(--bs-warning-text-emphasis);--bs-alert-bg: var(--bs-warning-bg-subtle);--bs-alert-border-color: var(--bs-warning-border-subtle);--bs-alert-link-color: var(--bs-warning-text-emphasis)}.alert-danger{--bs-alert-color: var(--bs-danger-text-emphasis);--bs-alert-bg: var(--bs-danger-bg-subtle);--bs-alert-border-color: var(--bs-danger-border-subtle);--bs-alert-link-color: var(--bs-danger-text-emphasis)}.alert-light{--bs-alert-color: var(--bs-light-text-emphasis);--bs-alert-bg: var(--bs-light-bg-subtle);--bs-alert-border-color: var(--bs-light-border-subtle);--bs-alert-link-color: var(--bs-light-text-emphasis)}.alert-dark{--bs-alert-color: var(--bs-dark-text-emphasis);--bs-alert-bg: var(--bs-dark-bg-subtle);--bs-alert-border-color: var(--bs-dark-border-subtle);--bs-alert-link-color: var(--bs-dark-text-emphasis)}@keyframes progress-bar-stripes{0%{background-position-x:.5rem}}.progress,.progress-stacked{--bs-progress-height: 0.5rem;--bs-progress-font-size:0.75rem;--bs-progress-bg: #e9ecef;--bs-progress-border-radius: 0.25rem;--bs-progress-box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.075);--bs-progress-bar-color: #fff;--bs-progress-bar-bg: #5c2983;--bs-progress-bar-transition: width 0.6s ease;display:flex;display:-webkit-flex;height:var(--bs-progress-height);overflow:hidden;font-size:var(--bs-progress-font-size);background-color:var(--bs-progress-bg)}.progress-bar{display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;justify-content:center;-webkit-justify-content:center;overflow:hidden;color:var(--bs-progress-bar-color);text-align:center;white-space:nowrap;background-color:var(--bs-progress-bar-bg);transition:var(--bs-progress-bar-transition)}@media(prefers-reduced-motion: reduce){.progress-bar{transition:none}}.progress-bar-striped{background-image:linear-gradient(45deg, rgba(255, 
255, 255, 0.15) 25%, transparent 25%, transparent 50%, rgba(255, 255, 255, 0.15) 50%, rgba(255, 255, 255, 0.15) 75%, transparent 75%, transparent);background-size:var(--bs-progress-height) var(--bs-progress-height)}.progress-stacked>.progress{overflow:visible}.progress-stacked>.progress>.progress-bar{width:100%}.progress-bar-animated{animation:1s linear infinite progress-bar-stripes}@media(prefers-reduced-motion: reduce){.progress-bar-animated{animation:none}}.list-group{--bs-list-group-color: #343a40;--bs-list-group-bg: #fff;--bs-list-group-border-color: #dee2e6;--bs-list-group-border-width: 1px;--bs-list-group-border-radius: 0.25rem;--bs-list-group-item-padding-x: 1rem;--bs-list-group-item-padding-y: 0.5rem;--bs-list-group-action-color: rgba(52, 58, 64, 0.75);--bs-list-group-action-hover-color: #000;--bs-list-group-action-hover-bg: #f8f9fa;--bs-list-group-action-active-color: #343a40;--bs-list-group-action-active-bg: #e9ecef;--bs-list-group-disabled-color: rgba(52, 58, 64, 0.75);--bs-list-group-disabled-bg: #fff;--bs-list-group-active-color: #fff;--bs-list-group-active-bg: #5c2983;--bs-list-group-active-border-color: #5c2983;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;padding-left:0;margin-bottom:0}.list-group-numbered{list-style-type:none;counter-reset:section}.list-group-numbered>.list-group-item::before{content:counters(section, ".") ". 
";counter-increment:section}.list-group-item-action{width:100%;color:var(--bs-list-group-action-color);text-align:inherit}.list-group-item-action:hover,.list-group-item-action:focus{z-index:1;color:var(--bs-list-group-action-hover-color);text-decoration:none;background-color:var(--bs-list-group-action-hover-bg)}.list-group-item-action:active{color:var(--bs-list-group-action-active-color);background-color:var(--bs-list-group-action-active-bg)}.list-group-item{position:relative;display:block;padding:var(--bs-list-group-item-padding-y) var(--bs-list-group-item-padding-x);color:var(--bs-list-group-color);text-decoration:none;-webkit-text-decoration:none;-moz-text-decoration:none;-ms-text-decoration:none;-o-text-decoration:none;background-color:var(--bs-list-group-bg);border:var(--bs-list-group-border-width) solid var(--bs-list-group-border-color)}.list-group-item.disabled,.list-group-item:disabled{color:var(--bs-list-group-disabled-color);pointer-events:none;background-color:var(--bs-list-group-disabled-bg)}.list-group-item.active{z-index:2;color:var(--bs-list-group-active-color);background-color:var(--bs-list-group-active-bg);border-color:var(--bs-list-group-active-border-color)}.list-group-item+.list-group-item{border-top-width:0}.list-group-item+.list-group-item.active{margin-top:calc(-1*var(--bs-list-group-border-width));border-top-width:var(--bs-list-group-border-width)}.list-group-horizontal{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal>.list-group-item.active{margin-top:0}.list-group-horizontal>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}@media(min-width: 
576px){.list-group-horizontal-sm{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-sm>.list-group-item.active{margin-top:0}.list-group-horizontal-sm>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal-sm>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}}@media(min-width: 768px){.list-group-horizontal-md{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-md>.list-group-item.active{margin-top:0}.list-group-horizontal-md>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal-md>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}}@media(min-width: 992px){.list-group-horizontal-lg{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-lg>.list-group-item.active{margin-top:0}.list-group-horizontal-lg>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal-lg>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}}@media(min-width: 1200px){.list-group-horizontal-xl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xl>.list-group-item.active{margin-top:0}.list-group-horizontal-xl>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal-xl>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}}@media(min-width: 
1400px){.list-group-horizontal-xxl{flex-direction:row;-webkit-flex-direction:row}.list-group-horizontal-xxl>.list-group-item.active{margin-top:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item{border-top-width:var(--bs-list-group-border-width);border-left-width:0}.list-group-horizontal-xxl>.list-group-item+.list-group-item.active{margin-left:calc(-1*var(--bs-list-group-border-width));border-left-width:var(--bs-list-group-border-width)}}.list-group-flush>.list-group-item{border-width:0 0 var(--bs-list-group-border-width)}.list-group-flush>.list-group-item:last-child{border-bottom-width:0}.list-group-item-default{--bs-list-group-color: var(--bs-default-text-emphasis);--bs-list-group-bg: var(--bs-default-bg-subtle);--bs-list-group-border-color: var(--bs-default-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-default-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-default-border-subtle);--bs-list-group-active-color: var(--bs-default-bg-subtle);--bs-list-group-active-bg: var(--bs-default-text-emphasis);--bs-list-group-active-border-color: var(--bs-default-text-emphasis)}.list-group-item-primary{--bs-list-group-color: var(--bs-primary-text-emphasis);--bs-list-group-bg: var(--bs-primary-bg-subtle);--bs-list-group-border-color: var(--bs-primary-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-primary-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-primary-border-subtle);--bs-list-group-active-color: var(--bs-primary-bg-subtle);--bs-list-group-active-bg: var(--bs-primary-text-emphasis);--bs-list-group-active-border-color: var(--bs-primary-text-emphasis)}.list-group-item-secondary{--bs-list-group-color: var(--bs-secondary-text-emphasis);--bs-list-group-bg: 
var(--bs-secondary-bg-subtle);--bs-list-group-border-color: var(--bs-secondary-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-secondary-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-secondary-border-subtle);--bs-list-group-active-color: var(--bs-secondary-bg-subtle);--bs-list-group-active-bg: var(--bs-secondary-text-emphasis);--bs-list-group-active-border-color: var(--bs-secondary-text-emphasis)}.list-group-item-success{--bs-list-group-color: var(--bs-success-text-emphasis);--bs-list-group-bg: var(--bs-success-bg-subtle);--bs-list-group-border-color: var(--bs-success-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-success-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-success-border-subtle);--bs-list-group-active-color: var(--bs-success-bg-subtle);--bs-list-group-active-bg: var(--bs-success-text-emphasis);--bs-list-group-active-border-color: var(--bs-success-text-emphasis)}.list-group-item-info{--bs-list-group-color: var(--bs-info-text-emphasis);--bs-list-group-bg: var(--bs-info-bg-subtle);--bs-list-group-border-color: var(--bs-info-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-info-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-info-border-subtle);--bs-list-group-active-color: var(--bs-info-bg-subtle);--bs-list-group-active-bg: var(--bs-info-text-emphasis);--bs-list-group-active-border-color: var(--bs-info-text-emphasis)}.list-group-item-warning{--bs-list-group-color: var(--bs-warning-text-emphasis);--bs-list-group-bg: var(--bs-warning-bg-subtle);--bs-list-group-border-color: 
var(--bs-warning-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-warning-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-warning-border-subtle);--bs-list-group-active-color: var(--bs-warning-bg-subtle);--bs-list-group-active-bg: var(--bs-warning-text-emphasis);--bs-list-group-active-border-color: var(--bs-warning-text-emphasis)}.list-group-item-danger{--bs-list-group-color: var(--bs-danger-text-emphasis);--bs-list-group-bg: var(--bs-danger-bg-subtle);--bs-list-group-border-color: var(--bs-danger-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-danger-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-danger-border-subtle);--bs-list-group-active-color: var(--bs-danger-bg-subtle);--bs-list-group-active-bg: var(--bs-danger-text-emphasis);--bs-list-group-active-border-color: var(--bs-danger-text-emphasis)}.list-group-item-light{--bs-list-group-color: var(--bs-light-text-emphasis);--bs-list-group-bg: var(--bs-light-bg-subtle);--bs-list-group-border-color: var(--bs-light-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: var(--bs-light-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-light-border-subtle);--bs-list-group-active-color: var(--bs-light-bg-subtle);--bs-list-group-active-bg: var(--bs-light-text-emphasis);--bs-list-group-active-border-color: var(--bs-light-text-emphasis)}.list-group-item-dark{--bs-list-group-color: var(--bs-dark-text-emphasis);--bs-list-group-bg: var(--bs-dark-bg-subtle);--bs-list-group-border-color: var(--bs-dark-border-subtle);--bs-list-group-action-hover-color: var(--bs-emphasis-color);--bs-list-group-action-hover-bg: 
var(--bs-dark-border-subtle);--bs-list-group-action-active-color: var(--bs-emphasis-color);--bs-list-group-action-active-bg: var(--bs-dark-border-subtle);--bs-list-group-active-color: var(--bs-dark-bg-subtle);--bs-list-group-active-bg: var(--bs-dark-text-emphasis);--bs-list-group-active-border-color: var(--bs-dark-text-emphasis)}.btn-close{--bs-btn-close-color: #000;--bs-btn-close-bg: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23000'%3e%3cpath d='M.293.293a1 1 0 0 1 1.414 0L8 6.586 14.293.293a1 1 0 1 1 1.414 1.414L9.414 8l6.293 6.293a1 1 0 0 1-1.414 1.414L8 9.414l-6.293 6.293a1 1 0 0 1-1.414-1.414L6.586 8 .293 1.707a1 1 0 0 1 0-1.414z'/%3e%3c/svg%3e");--bs-btn-close-opacity: 0.5;--bs-btn-close-hover-opacity: 0.75;--bs-btn-close-focus-shadow: 0 0 0 0.25rem rgba(92, 41, 131, 0.25);--bs-btn-close-focus-opacity: 1;--bs-btn-close-disabled-opacity: 0.25;--bs-btn-close-white-filter: invert(1) grayscale(100%) brightness(200%);box-sizing:content-box;width:1em;height:1em;padding:.25em .25em;color:var(--bs-btn-close-color);background:rgba(0,0,0,0) var(--bs-btn-close-bg) center/1em auto no-repeat;border:0;opacity:var(--bs-btn-close-opacity)}.btn-close:hover{color:var(--bs-btn-close-color);text-decoration:none;opacity:var(--bs-btn-close-hover-opacity)}.btn-close:focus{outline:0;box-shadow:var(--bs-btn-close-focus-shadow);opacity:var(--bs-btn-close-focus-opacity)}.btn-close:disabled,.btn-close.disabled{pointer-events:none;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none;opacity:var(--bs-btn-close-disabled-opacity)}.btn-close-white{filter:var(--bs-btn-close-white-filter)}[data-bs-theme=dark] .btn-close{filter:var(--bs-btn-close-white-filter)}.toast{--bs-toast-zindex: 1090;--bs-toast-padding-x: 0.75rem;--bs-toast-padding-y: 0.5rem;--bs-toast-spacing: 1.5rem;--bs-toast-max-width: 350px;--bs-toast-font-size:0.875rem;--bs-toast-color: ;--bs-toast-bg: rgba(255, 255, 255, 
0.85);--bs-toast-border-width: 1px;--bs-toast-border-color: rgba(0, 0, 0, 0.175);--bs-toast-border-radius: 0.25rem;--bs-toast-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);--bs-toast-header-color: rgba(52, 58, 64, 0.75);--bs-toast-header-bg: rgba(255, 255, 255, 0.85);--bs-toast-header-border-color: rgba(0, 0, 0, 0.175);width:var(--bs-toast-max-width);max-width:100%;font-size:var(--bs-toast-font-size);color:var(--bs-toast-color);pointer-events:auto;background-color:var(--bs-toast-bg);background-clip:padding-box;border:var(--bs-toast-border-width) solid var(--bs-toast-border-color);box-shadow:var(--bs-toast-box-shadow)}.toast.showing{opacity:0}.toast:not(.show){display:none}.toast-container{--bs-toast-zindex: 1090;position:absolute;z-index:var(--bs-toast-zindex);width:max-content;width:-webkit-max-content;width:-moz-max-content;width:-ms-max-content;width:-o-max-content;max-width:100%;pointer-events:none}.toast-container>:not(:last-child){margin-bottom:var(--bs-toast-spacing)}.toast-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;padding:var(--bs-toast-padding-y) var(--bs-toast-padding-x);color:var(--bs-toast-header-color);background-color:var(--bs-toast-header-bg);background-clip:padding-box;border-bottom:var(--bs-toast-border-width) solid var(--bs-toast-header-border-color)}.toast-header .btn-close{margin-right:calc(-0.5*var(--bs-toast-padding-x));margin-left:var(--bs-toast-padding-x)}.toast-body{padding:var(--bs-toast-padding-x);word-wrap:break-word}.modal{--bs-modal-zindex: 1055;--bs-modal-width: 500px;--bs-modal-padding: 1rem;--bs-modal-margin: 0.5rem;--bs-modal-color: ;--bs-modal-bg: #fff;--bs-modal-border-color: rgba(0, 0, 0, 0.175);--bs-modal-border-width: 1px;--bs-modal-border-radius: 0.5rem;--bs-modal-box-shadow: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075);--bs-modal-inner-border-radius: calc(0.5rem - 1px);--bs-modal-header-padding-x: 1rem;--bs-modal-header-padding-y: 1rem;--bs-modal-header-padding: 1rem 
1rem;--bs-modal-header-border-color: #dee2e6;--bs-modal-header-border-width: 1px;--bs-modal-title-line-height: 1.5;--bs-modal-footer-gap: 0.5rem;--bs-modal-footer-bg: ;--bs-modal-footer-border-color: #dee2e6;--bs-modal-footer-border-width: 1px;position:fixed;top:0;left:0;z-index:var(--bs-modal-zindex);display:none;width:100%;height:100%;overflow-x:hidden;overflow-y:auto;outline:0}.modal-dialog{position:relative;width:auto;margin:var(--bs-modal-margin);pointer-events:none}.modal.fade .modal-dialog{transition:transform .3s ease-out;transform:translate(0, -50px)}@media(prefers-reduced-motion: reduce){.modal.fade .modal-dialog{transition:none}}.modal.show .modal-dialog{transform:none}.modal.modal-static .modal-dialog{transform:scale(1.02)}.modal-dialog-scrollable{height:calc(100% - var(--bs-modal-margin)*2)}.modal-dialog-scrollable .modal-content{max-height:100%;overflow:hidden}.modal-dialog-scrollable .modal-body{overflow-y:auto}.modal-dialog-centered{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;min-height:calc(100% - var(--bs-modal-margin)*2)}.modal-content{position:relative;display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;width:100%;color:var(--bs-modal-color);pointer-events:auto;background-color:var(--bs-modal-bg);background-clip:padding-box;border:var(--bs-modal-border-width) solid var(--bs-modal-border-color);outline:0}.modal-backdrop{--bs-backdrop-zindex: 1050;--bs-backdrop-bg: #000;--bs-backdrop-opacity: 
0.5;position:fixed;top:0;left:0;z-index:var(--bs-backdrop-zindex);width:100vw;height:100vh;background-color:var(--bs-backdrop-bg)}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:var(--bs-backdrop-opacity)}.modal-header{display:flex;display:-webkit-flex;flex-shrink:0;-webkit-flex-shrink:0;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:var(--bs-modal-header-padding);border-bottom:var(--bs-modal-header-border-width) solid var(--bs-modal-header-border-color)}.modal-header .btn-close{padding:calc(var(--bs-modal-header-padding-y)*.5) calc(var(--bs-modal-header-padding-x)*.5);margin:calc(-0.5*var(--bs-modal-header-padding-y)) calc(-0.5*var(--bs-modal-header-padding-x)) calc(-0.5*var(--bs-modal-header-padding-y)) auto}.modal-title{margin-bottom:0;line-height:var(--bs-modal-title-line-height)}.modal-body{position:relative;flex:1 1 auto;-webkit-flex:1 1 auto;padding:var(--bs-modal-padding)}.modal-footer{display:flex;display:-webkit-flex;flex-shrink:0;-webkit-flex-shrink:0;flex-wrap:wrap;-webkit-flex-wrap:wrap;align-items:center;-webkit-align-items:center;justify-content:flex-end;-webkit-justify-content:flex-end;padding:calc(var(--bs-modal-padding) - var(--bs-modal-footer-gap)*.5);background-color:var(--bs-modal-footer-bg);border-top:var(--bs-modal-footer-border-width) solid var(--bs-modal-footer-border-color)}.modal-footer>*{margin:calc(var(--bs-modal-footer-gap)*.5)}@media(min-width: 576px){.modal{--bs-modal-margin: 1.75rem;--bs-modal-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15)}.modal-dialog{max-width:var(--bs-modal-width);margin-right:auto;margin-left:auto}.modal-sm{--bs-modal-width: 300px}}@media(min-width: 992px){.modal-lg,.modal-xl{--bs-modal-width: 800px}}@media(min-width: 1200px){.modal-xl{--bs-modal-width: 1140px}}.modal-fullscreen{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen .modal-content{height:100%;border:0}.modal-fullscreen 
.modal-body{overflow-y:auto}@media(max-width: 575.98px){.modal-fullscreen-sm-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-sm-down .modal-content{height:100%;border:0}.modal-fullscreen-sm-down .modal-body{overflow-y:auto}}@media(max-width: 767.98px){.modal-fullscreen-md-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-md-down .modal-content{height:100%;border:0}.modal-fullscreen-md-down .modal-body{overflow-y:auto}}@media(max-width: 991.98px){.modal-fullscreen-lg-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-lg-down .modal-content{height:100%;border:0}.modal-fullscreen-lg-down .modal-body{overflow-y:auto}}@media(max-width: 1199.98px){.modal-fullscreen-xl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xl-down .modal-content{height:100%;border:0}.modal-fullscreen-xl-down .modal-body{overflow-y:auto}}@media(max-width: 1399.98px){.modal-fullscreen-xxl-down{width:100vw;max-width:none;height:100%;margin:0}.modal-fullscreen-xxl-down .modal-content{height:100%;border:0}.modal-fullscreen-xxl-down .modal-body{overflow-y:auto}}.tooltip{--bs-tooltip-zindex: 1080;--bs-tooltip-max-width: 200px;--bs-tooltip-padding-x: 0.5rem;--bs-tooltip-padding-y: 0.25rem;--bs-tooltip-margin: ;--bs-tooltip-font-size:0.875rem;--bs-tooltip-color: #fff;--bs-tooltip-bg: #000;--bs-tooltip-border-radius: 0.25rem;--bs-tooltip-opacity: 0.9;--bs-tooltip-arrow-width: 0.8rem;--bs-tooltip-arrow-height: 0.4rem;z-index:var(--bs-tooltip-zindex);display:block;margin:var(--bs-tooltip-margin);font-family:"Source Sans Pro",-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI 
Symbol";font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;white-space:normal;word-spacing:normal;line-break:auto;font-size:var(--bs-tooltip-font-size);word-wrap:break-word;opacity:0}.tooltip.show{opacity:var(--bs-tooltip-opacity)}.tooltip .tooltip-arrow{display:block;width:var(--bs-tooltip-arrow-width);height:var(--bs-tooltip-arrow-height)}.tooltip .tooltip-arrow::before{position:absolute;content:"";border-color:rgba(0,0,0,0);border-style:solid}.bs-tooltip-top .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow{bottom:calc(-1*var(--bs-tooltip-arrow-height))}.bs-tooltip-top .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=top] .tooltip-arrow::before{top:-1px;border-width:var(--bs-tooltip-arrow-height) calc(var(--bs-tooltip-arrow-width)*.5) 0;border-top-color:var(--bs-tooltip-bg)}.bs-tooltip-end .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow{left:calc(-1*var(--bs-tooltip-arrow-height));width:var(--bs-tooltip-arrow-height);height:var(--bs-tooltip-arrow-width)}.bs-tooltip-end .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=right] .tooltip-arrow::before{right:-1px;border-width:calc(var(--bs-tooltip-arrow-width)*.5) var(--bs-tooltip-arrow-height) calc(var(--bs-tooltip-arrow-width)*.5) 0;border-right-color:var(--bs-tooltip-bg)}.bs-tooltip-bottom .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow{top:calc(-1*var(--bs-tooltip-arrow-height))}.bs-tooltip-bottom .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=bottom] .tooltip-arrow::before{bottom:-1px;border-width:0 calc(var(--bs-tooltip-arrow-width)*.5) var(--bs-tooltip-arrow-height);border-bottom-color:var(--bs-tooltip-bg)}.bs-tooltip-start .tooltip-arrow,.bs-tooltip-auto[data-popper-placement^=left] 
.tooltip-arrow{right:calc(-1*var(--bs-tooltip-arrow-height));width:var(--bs-tooltip-arrow-height);height:var(--bs-tooltip-arrow-width)}.bs-tooltip-start .tooltip-arrow::before,.bs-tooltip-auto[data-popper-placement^=left] .tooltip-arrow::before{left:-1px;border-width:calc(var(--bs-tooltip-arrow-width)*.5) 0 calc(var(--bs-tooltip-arrow-width)*.5) var(--bs-tooltip-arrow-height);border-left-color:var(--bs-tooltip-bg)}.tooltip-inner{max-width:var(--bs-tooltip-max-width);padding:var(--bs-tooltip-padding-y) var(--bs-tooltip-padding-x);color:var(--bs-tooltip-color);text-align:center;background-color:var(--bs-tooltip-bg)}.popover{--bs-popover-zindex: 1070;--bs-popover-max-width: 276px;--bs-popover-font-size:0.875rem;--bs-popover-bg: #fff;--bs-popover-border-width: 1px;--bs-popover-border-color: rgba(0, 0, 0, 0.175);--bs-popover-border-radius: 0.5rem;--bs-popover-inner-border-radius: calc(0.5rem - 1px);--bs-popover-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15);--bs-popover-header-padding-x: 1rem;--bs-popover-header-padding-y: 0.5rem;--bs-popover-header-font-size:1rem;--bs-popover-header-color: inherit;--bs-popover-header-bg: #e9ecef;--bs-popover-body-padding-x: 1rem;--bs-popover-body-padding-y: 1rem;--bs-popover-body-color: #343a40;--bs-popover-arrow-width: 1rem;--bs-popover-arrow-height: 0.5rem;--bs-popover-arrow-border: var(--bs-popover-border-color);z-index:var(--bs-popover-zindex);display:block;max-width:var(--bs-popover-max-width);font-family:"Source Sans Pro",-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI 
Symbol";font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;white-space:normal;word-spacing:normal;line-break:auto;font-size:var(--bs-popover-font-size);word-wrap:break-word;background-color:var(--bs-popover-bg);background-clip:padding-box;border:var(--bs-popover-border-width) solid var(--bs-popover-border-color)}.popover .popover-arrow{display:block;width:var(--bs-popover-arrow-width);height:var(--bs-popover-arrow-height)}.popover .popover-arrow::before,.popover .popover-arrow::after{position:absolute;display:block;content:"";border-color:rgba(0,0,0,0);border-style:solid;border-width:0}.bs-popover-top>.popover-arrow,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow{bottom:calc(-1*(var(--bs-popover-arrow-height)) - var(--bs-popover-border-width))}.bs-popover-top>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before,.bs-popover-top>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after{border-width:var(--bs-popover-arrow-height) calc(var(--bs-popover-arrow-width)*.5) 0}.bs-popover-top>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::before{bottom:0;border-top-color:var(--bs-popover-arrow-border)}.bs-popover-top>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=top]>.popover-arrow::after{bottom:var(--bs-popover-border-width);border-top-color:var(--bs-popover-bg)}.bs-popover-end>.popover-arrow,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow{left:calc(-1*(var(--bs-popover-arrow-height)) - 
var(--bs-popover-border-width));width:var(--bs-popover-arrow-height);height:var(--bs-popover-arrow-width)}.bs-popover-end>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before,.bs-popover-end>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after{border-width:calc(var(--bs-popover-arrow-width)*.5) var(--bs-popover-arrow-height) calc(var(--bs-popover-arrow-width)*.5) 0}.bs-popover-end>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::before{left:0;border-right-color:var(--bs-popover-arrow-border)}.bs-popover-end>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=right]>.popover-arrow::after{left:var(--bs-popover-border-width);border-right-color:var(--bs-popover-bg)}.bs-popover-bottom>.popover-arrow,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow{top:calc(-1*(var(--bs-popover-arrow-height)) - var(--bs-popover-border-width))}.bs-popover-bottom>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before,.bs-popover-bottom>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after{border-width:0 calc(var(--bs-popover-arrow-width)*.5) var(--bs-popover-arrow-height)}.bs-popover-bottom>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::before{top:0;border-bottom-color:var(--bs-popover-arrow-border)}.bs-popover-bottom>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=bottom]>.popover-arrow::after{top:var(--bs-popover-border-width);border-bottom-color:var(--bs-popover-bg)}.bs-popover-bottom .popover-header::before,.bs-popover-auto[data-popper-placement^=bottom] .popover-header::before{position:absolute;top:0;left:50%;display:block;width:var(--bs-popover-arrow-width);margin-left:calc(-0.5*var(--bs-popover-arrow-width));content:"";border-bottom:var(--bs-popover-border-width) solid 
var(--bs-popover-header-bg)}.bs-popover-start>.popover-arrow,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow{right:calc(-1*(var(--bs-popover-arrow-height)) - var(--bs-popover-border-width));width:var(--bs-popover-arrow-height);height:var(--bs-popover-arrow-width)}.bs-popover-start>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before,.bs-popover-start>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after{border-width:calc(var(--bs-popover-arrow-width)*.5) 0 calc(var(--bs-popover-arrow-width)*.5) var(--bs-popover-arrow-height)}.bs-popover-start>.popover-arrow::before,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::before{right:0;border-left-color:var(--bs-popover-arrow-border)}.bs-popover-start>.popover-arrow::after,.bs-popover-auto[data-popper-placement^=left]>.popover-arrow::after{right:var(--bs-popover-border-width);border-left-color:var(--bs-popover-bg)}.popover-header{padding:var(--bs-popover-header-padding-y) var(--bs-popover-header-padding-x);margin-bottom:0;font-size:var(--bs-popover-header-font-size);color:var(--bs-popover-header-color);background-color:var(--bs-popover-header-bg);border-bottom:var(--bs-popover-border-width) solid var(--bs-popover-border-color)}.popover-header:empty{display:none}.popover-body{padding:var(--bs-popover-body-padding-y) var(--bs-popover-body-padding-x);color:var(--bs-popover-body-color)}.carousel{position:relative}.carousel.pointer-event{touch-action:pan-y;-webkit-touch-action:pan-y;-moz-touch-action:pan-y;-ms-touch-action:pan-y;-o-touch-action:pan-y}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-inner::after{display:block;clear:both;content:""}.carousel-item{position:relative;display:none;float:left;width:100%;margin-right:-100%;backface-visibility:hidden;-webkit-backface-visibility:hidden;-moz-backface-visibility:hidden;-ms-backface-visibility:hidden;-o-backface-visibility:hidden;transition:transform 
.6s ease-in-out}@media(prefers-reduced-motion: reduce){.carousel-item{transition:none}}.carousel-item.active,.carousel-item-next,.carousel-item-prev{display:block}.carousel-item-next:not(.carousel-item-start),.active.carousel-item-end{transform:translateX(100%)}.carousel-item-prev:not(.carousel-item-end),.active.carousel-item-start{transform:translateX(-100%)}.carousel-fade .carousel-item{opacity:0;transition-property:opacity;transform:none}.carousel-fade .carousel-item.active,.carousel-fade .carousel-item-next.carousel-item-start,.carousel-fade .carousel-item-prev.carousel-item-end{z-index:1;opacity:1}.carousel-fade .active.carousel-item-start,.carousel-fade .active.carousel-item-end{z-index:0;opacity:0;transition:opacity 0s .6s}@media(prefers-reduced-motion: reduce){.carousel-fade .active.carousel-item-start,.carousel-fade .active.carousel-item-end{transition:none}}.carousel-control-prev,.carousel-control-next{position:absolute;top:0;bottom:0;z-index:1;display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:center;-webkit-justify-content:center;width:15%;padding:0;color:#fff;text-align:center;background:none;border:0;opacity:.5;transition:opacity .15s ease}@media(prefers-reduced-motion: reduce){.carousel-control-prev,.carousel-control-next{transition:none}}.carousel-control-prev:hover,.carousel-control-prev:focus,.carousel-control-next:hover,.carousel-control-next:focus{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0}.carousel-control-next{right:0}.carousel-control-prev-icon,.carousel-control-next-icon{display:inline-block;width:2rem;height:2rem;background-repeat:no-repeat;background-position:50%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M11.354 1.646a.5.5 0 0 1 0 .708L5.707 8l5.647 5.646a.5.5 0 0 1-.708.708l-6-6a.5.5 0 0 1 0-.708l6-6a.5.5 0 0 1 .708 
0z'/%3e%3c/svg%3e")}.carousel-control-next-icon{background-image:url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23fff'%3e%3cpath d='M4.646 1.646a.5.5 0 0 1 .708 0l6 6a.5.5 0 0 1 0 .708l-6 6a.5.5 0 0 1-.708-.708L10.293 8 4.646 2.354a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e")}.carousel-indicators{position:absolute;right:0;bottom:0;left:0;z-index:2;display:flex;display:-webkit-flex;justify-content:center;-webkit-justify-content:center;padding:0;margin-right:15%;margin-bottom:1rem;margin-left:15%}.carousel-indicators [data-bs-target]{box-sizing:content-box;flex:0 1 auto;-webkit-flex:0 1 auto;width:30px;height:3px;padding:0;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:#fff;background-clip:padding-box;border:0;border-top:10px solid rgba(0,0,0,0);border-bottom:10px solid rgba(0,0,0,0);opacity:.5;transition:opacity .6s ease}@media(prefers-reduced-motion: reduce){.carousel-indicators [data-bs-target]{transition:none}}.carousel-indicators .active{opacity:1}.carousel-caption{position:absolute;right:15%;bottom:1.25rem;left:15%;padding-top:1.25rem;padding-bottom:1.25rem;color:#fff;text-align:center}.carousel-dark .carousel-control-prev-icon,.carousel-dark .carousel-control-next-icon{filter:invert(1) grayscale(100)}.carousel-dark .carousel-indicators [data-bs-target]{background-color:#000}.carousel-dark .carousel-caption{color:#000}[data-bs-theme=dark] .carousel .carousel-control-prev-icon,[data-bs-theme=dark] .carousel .carousel-control-next-icon,[data-bs-theme=dark].carousel .carousel-control-prev-icon,[data-bs-theme=dark].carousel .carousel-control-next-icon{filter:invert(1) grayscale(100)}[data-bs-theme=dark] .carousel .carousel-indicators [data-bs-target],[data-bs-theme=dark].carousel .carousel-indicators [data-bs-target]{background-color:#000}[data-bs-theme=dark] .carousel .carousel-caption,[data-bs-theme=dark].carousel 
.carousel-caption{color:#000}.spinner-grow,.spinner-border{display:inline-block;width:var(--bs-spinner-width);height:var(--bs-spinner-height);vertical-align:var(--bs-spinner-vertical-align);border-radius:50%;animation:var(--bs-spinner-animation-speed) linear infinite var(--bs-spinner-animation-name)}@keyframes spinner-border{to{transform:rotate(360deg) /* rtl:ignore */}}.spinner-border{--bs-spinner-width: 2rem;--bs-spinner-height: 2rem;--bs-spinner-vertical-align: -0.125em;--bs-spinner-border-width: 0.25em;--bs-spinner-animation-speed: 0.75s;--bs-spinner-animation-name: spinner-border;border:var(--bs-spinner-border-width) solid currentcolor;border-right-color:rgba(0,0,0,0)}.spinner-border-sm{--bs-spinner-width: 1rem;--bs-spinner-height: 1rem;--bs-spinner-border-width: 0.2em}@keyframes spinner-grow{0%{transform:scale(0)}50%{opacity:1;transform:none}}.spinner-grow{--bs-spinner-width: 2rem;--bs-spinner-height: 2rem;--bs-spinner-vertical-align: -0.125em;--bs-spinner-animation-speed: 0.75s;--bs-spinner-animation-name: spinner-grow;background-color:currentcolor;opacity:0}.spinner-grow-sm{--bs-spinner-width: 1rem;--bs-spinner-height: 1rem}@media(prefers-reduced-motion: reduce){.spinner-border,.spinner-grow{--bs-spinner-animation-speed: 1.5s}}.offcanvas,.offcanvas-xxl,.offcanvas-xl,.offcanvas-lg,.offcanvas-md,.offcanvas-sm{--bs-offcanvas-zindex: 1045;--bs-offcanvas-width: 400px;--bs-offcanvas-height: 30vh;--bs-offcanvas-padding-x: 1rem;--bs-offcanvas-padding-y: 1rem;--bs-offcanvas-color: #343a40;--bs-offcanvas-bg: #fff;--bs-offcanvas-border-width: 1px;--bs-offcanvas-border-color: rgba(0, 0, 0, 0.175);--bs-offcanvas-box-shadow: 0 0.125rem 0.25rem rgba(0, 0, 0, 0.075);--bs-offcanvas-transition: transform 0.3s ease-in-out;--bs-offcanvas-title-line-height: 1.5}@media(max-width: 
575.98px){.offcanvas-sm{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}}@media(max-width: 575.98px)and (prefers-reduced-motion: reduce){.offcanvas-sm{transition:none}}@media(max-width: 575.98px){.offcanvas-sm.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas-sm.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas-sm.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas-sm.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas-sm.showing,.offcanvas-sm.show:not(.hiding){transform:none}.offcanvas-sm.showing,.offcanvas-sm.hiding,.offcanvas-sm.show{visibility:visible}}@media(min-width: 576px){.offcanvas-sm{--bs-offcanvas-height: auto;--bs-offcanvas-border-width: 0;background-color:rgba(0,0,0,0) !important}.offcanvas-sm .offcanvas-header{display:none}.offcanvas-sm .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible;background-color:rgba(0,0,0,0) !important}}@media(max-width: 
767.98px){.offcanvas-md{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}}@media(max-width: 767.98px)and (prefers-reduced-motion: reduce){.offcanvas-md{transition:none}}@media(max-width: 767.98px){.offcanvas-md.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas-md.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas-md.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas-md.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas-md.showing,.offcanvas-md.show:not(.hiding){transform:none}.offcanvas-md.showing,.offcanvas-md.hiding,.offcanvas-md.show{visibility:visible}}@media(min-width: 768px){.offcanvas-md{--bs-offcanvas-height: auto;--bs-offcanvas-border-width: 0;background-color:rgba(0,0,0,0) !important}.offcanvas-md .offcanvas-header{display:none}.offcanvas-md .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible;background-color:rgba(0,0,0,0) !important}}@media(max-width: 
991.98px){.offcanvas-lg{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}}@media(max-width: 991.98px)and (prefers-reduced-motion: reduce){.offcanvas-lg{transition:none}}@media(max-width: 991.98px){.offcanvas-lg.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas-lg.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas-lg.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas-lg.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas-lg.showing,.offcanvas-lg.show:not(.hiding){transform:none}.offcanvas-lg.showing,.offcanvas-lg.hiding,.offcanvas-lg.show{visibility:visible}}@media(min-width: 992px){.offcanvas-lg{--bs-offcanvas-height: auto;--bs-offcanvas-border-width: 0;background-color:rgba(0,0,0,0) !important}.offcanvas-lg .offcanvas-header{display:none}.offcanvas-lg .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible;background-color:rgba(0,0,0,0) !important}}@media(max-width: 
1199.98px){.offcanvas-xl{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}}@media(max-width: 1199.98px)and (prefers-reduced-motion: reduce){.offcanvas-xl{transition:none}}@media(max-width: 1199.98px){.offcanvas-xl.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas-xl.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas-xl.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas-xl.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas-xl.showing,.offcanvas-xl.show:not(.hiding){transform:none}.offcanvas-xl.showing,.offcanvas-xl.hiding,.offcanvas-xl.show{visibility:visible}}@media(min-width: 1200px){.offcanvas-xl{--bs-offcanvas-height: auto;--bs-offcanvas-border-width: 0;background-color:rgba(0,0,0,0) !important}.offcanvas-xl .offcanvas-header{display:none}.offcanvas-xl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible;background-color:rgba(0,0,0,0) !important}}@media(max-width: 
1399.98px){.offcanvas-xxl{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}}@media(max-width: 1399.98px)and (prefers-reduced-motion: reduce){.offcanvas-xxl{transition:none}}@media(max-width: 1399.98px){.offcanvas-xxl.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas-xxl.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas-xxl.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas-xxl.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas-xxl.showing,.offcanvas-xxl.show:not(.hiding){transform:none}.offcanvas-xxl.showing,.offcanvas-xxl.hiding,.offcanvas-xxl.show{visibility:visible}}@media(min-width: 1400px){.offcanvas-xxl{--bs-offcanvas-height: auto;--bs-offcanvas-border-width: 0;background-color:rgba(0,0,0,0) !important}.offcanvas-xxl .offcanvas-header{display:none}.offcanvas-xxl .offcanvas-body{display:flex;display:-webkit-flex;flex-grow:0;-webkit-flex-grow:0;padding:0;overflow-y:visible;background-color:rgba(0,0,0,0) 
!important}}.offcanvas{position:fixed;bottom:0;z-index:var(--bs-offcanvas-zindex);display:flex;display:-webkit-flex;flex-direction:column;-webkit-flex-direction:column;max-width:100%;color:var(--bs-offcanvas-color);visibility:hidden;background-color:var(--bs-offcanvas-bg);background-clip:padding-box;outline:0;transition:var(--bs-offcanvas-transition)}@media(prefers-reduced-motion: reduce){.offcanvas{transition:none}}.offcanvas.offcanvas-start{top:0;left:0;width:var(--bs-offcanvas-width);border-right:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(-100%)}.offcanvas.offcanvas-end{top:0;right:0;width:var(--bs-offcanvas-width);border-left:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateX(100%)}.offcanvas.offcanvas-top{top:0;right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-bottom:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(-100%)}.offcanvas.offcanvas-bottom{right:0;left:0;height:var(--bs-offcanvas-height);max-height:100%;border-top:var(--bs-offcanvas-border-width) solid var(--bs-offcanvas-border-color);transform:translateY(100%)}.offcanvas.showing,.offcanvas.show:not(.hiding){transform:none}.offcanvas.showing,.offcanvas.hiding,.offcanvas.show{visibility:visible}.offcanvas-backdrop{position:fixed;top:0;left:0;z-index:1040;width:100vw;height:100vh;background-color:#000}.offcanvas-backdrop.fade{opacity:0}.offcanvas-backdrop.show{opacity:.5}.offcanvas-header{display:flex;display:-webkit-flex;align-items:center;-webkit-align-items:center;justify-content:space-between;-webkit-justify-content:space-between;padding:var(--bs-offcanvas-padding-y) var(--bs-offcanvas-padding-x)}.offcanvas-header .btn-close{padding:calc(var(--bs-offcanvas-padding-y)*.5) 
calc(var(--bs-offcanvas-padding-x)*.5);margin-top:calc(-0.5*var(--bs-offcanvas-padding-y));margin-right:calc(-0.5*var(--bs-offcanvas-padding-x));margin-bottom:calc(-0.5*var(--bs-offcanvas-padding-y))}.offcanvas-title{margin-bottom:0;line-height:var(--bs-offcanvas-title-line-height)}.offcanvas-body{flex-grow:1;-webkit-flex-grow:1;padding:var(--bs-offcanvas-padding-y) var(--bs-offcanvas-padding-x);overflow-y:auto}.placeholder{display:inline-block;min-height:1em;vertical-align:middle;cursor:wait;background-color:currentcolor;opacity:.5}.placeholder.btn::before{display:inline-block;content:""}.placeholder-xs{min-height:.6em}.placeholder-sm{min-height:.8em}.placeholder-lg{min-height:1.2em}.placeholder-glow .placeholder{animation:placeholder-glow 2s ease-in-out infinite}@keyframes placeholder-glow{50%{opacity:.2}}.placeholder-wave{mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);-webkit-mask-image:linear-gradient(130deg, #000 55%, rgba(0, 0, 0, 0.8) 75%, #000 95%);mask-size:200% 100%;-webkit-mask-size:200% 100%;animation:placeholder-wave 2s linear infinite}@keyframes placeholder-wave{100%{mask-position:-200% 0%;-webkit-mask-position:-200% 0%}}.clearfix::after{display:block;clear:both;content:""}.text-bg-default{color:#fff !important;background-color:RGBA(var(--bs-default-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-primary{color:#fff !important;background-color:RGBA(var(--bs-primary-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-secondary{color:#fff !important;background-color:RGBA(var(--bs-secondary-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-success{color:#fff !important;background-color:RGBA(var(--bs-success-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-info{color:#fff !important;background-color:RGBA(var(--bs-info-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-warning{color:#fff !important;background-color:RGBA(var(--bs-warning-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-danger{color:#fff 
!important;background-color:RGBA(var(--bs-danger-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-light{color:#000 !important;background-color:RGBA(var(--bs-light-rgb), var(--bs-bg-opacity, 1)) !important}.text-bg-dark{color:#fff !important;background-color:RGBA(var(--bs-dark-rgb), var(--bs-bg-opacity, 1)) !important}.link-default{color:RGBA(var(--bs-default-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-default-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-default:hover,.link-default:focus{color:RGBA(42, 46, 51, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(42, 46, 51, var(--bs-link-underline-opacity, 1)) !important}.link-primary{color:RGBA(var(--bs-primary-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-primary-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-primary:hover,.link-primary:focus{color:RGBA(74, 33, 105, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(74, 33, 105, var(--bs-link-underline-opacity, 1)) !important}.link-secondary{color:RGBA(var(--bs-secondary-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-secondary-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-secondary:hover,.link-secondary:focus{color:RGBA(42, 46, 51, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(42, 46, 51, var(--bs-link-underline-opacity, 1)) !important}.link-success{color:RGBA(var(--bs-success-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-success-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-success:hover,.link-success:focus{color:RGBA(50, 146, 19, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(50, 146, 19, var(--bs-link-underline-opacity, 1)) !important}.link-info{color:RGBA(var(--bs-info-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-info-rgb), var(--bs-link-underline-opacity, 1)) 
!important}.link-info:hover,.link-info:focus{color:RGBA(122, 67, 150, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(122, 67, 150, var(--bs-link-underline-opacity, 1)) !important}.link-warning{color:RGBA(var(--bs-warning-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-warning-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-warning:hover,.link-warning:focus{color:RGBA(204, 94, 19, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(204, 94, 19, var(--bs-link-underline-opacity, 1)) !important}.link-danger{color:RGBA(var(--bs-danger-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-danger-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-danger:hover,.link-danger:focus{color:RGBA(204, 0, 46, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(204, 0, 46, var(--bs-link-underline-opacity, 1)) !important}.link-light{color:RGBA(var(--bs-light-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-light-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-light:hover,.link-light:focus{color:RGBA(249, 250, 251, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(249, 250, 251, var(--bs-link-underline-opacity, 1)) !important}.link-dark{color:RGBA(var(--bs-dark-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-dark-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-dark:hover,.link-dark:focus{color:RGBA(42, 46, 51, var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(42, 46, 51, var(--bs-link-underline-opacity, 1)) !important}.link-body-emphasis{color:RGBA(var(--bs-emphasis-color-rgb), var(--bs-link-opacity, 1)) !important;text-decoration-color:RGBA(var(--bs-emphasis-color-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-body-emphasis:hover,.link-body-emphasis:focus{color:RGBA(var(--bs-emphasis-color-rgb), var(--bs-link-opacity, 0.75)) 
!important;text-decoration-color:RGBA(var(--bs-emphasis-color-rgb), var(--bs-link-underline-opacity, 0.75)) !important}.focus-ring:focus{outline:0;box-shadow:var(--bs-focus-ring-x, 0) var(--bs-focus-ring-y, 0) var(--bs-focus-ring-blur, 0) var(--bs-focus-ring-width) var(--bs-focus-ring-color)}.icon-link{display:inline-flex;gap:.375rem;align-items:center;-webkit-align-items:center;text-decoration-color:rgba(var(--bs-link-color-rgb), var(--bs-link-opacity, 0.5));text-underline-offset:.25em;backface-visibility:hidden;-webkit-backface-visibility:hidden;-moz-backface-visibility:hidden;-ms-backface-visibility:hidden;-o-backface-visibility:hidden}.icon-link>.bi{flex-shrink:0;-webkit-flex-shrink:0;width:1em;height:1em;fill:currentcolor;transition:.2s ease-in-out transform}@media(prefers-reduced-motion: reduce){.icon-link>.bi{transition:none}}.icon-link-hover:hover>.bi,.icon-link-hover:focus-visible>.bi{transform:var(--bs-icon-link-transform, translate3d(0.25em, 0, 0))}.ratio{position:relative;width:100%}.ratio::before{display:block;padding-top:var(--bs-aspect-ratio);content:""}.ratio>*{position:absolute;top:0;left:0;width:100%;height:100%}.ratio-1x1{--bs-aspect-ratio: 100%}.ratio-4x3{--bs-aspect-ratio: 75%}.ratio-16x9{--bs-aspect-ratio: 56.25%}.ratio-21x9{--bs-aspect-ratio: 42.8571428571%}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}.sticky-top{position:sticky;top:0;z-index:1020}.sticky-bottom{position:sticky;bottom:0;z-index:1020}@media(min-width: 576px){.sticky-sm-top{position:sticky;top:0;z-index:1020}.sticky-sm-bottom{position:sticky;bottom:0;z-index:1020}}@media(min-width: 768px){.sticky-md-top{position:sticky;top:0;z-index:1020}.sticky-md-bottom{position:sticky;bottom:0;z-index:1020}}@media(min-width: 992px){.sticky-lg-top{position:sticky;top:0;z-index:1020}.sticky-lg-bottom{position:sticky;bottom:0;z-index:1020}}@media(min-width: 
1200px){.sticky-xl-top{position:sticky;top:0;z-index:1020}.sticky-xl-bottom{position:sticky;bottom:0;z-index:1020}}@media(min-width: 1400px){.sticky-xxl-top{position:sticky;top:0;z-index:1020}.sticky-xxl-bottom{position:sticky;bottom:0;z-index:1020}}.hstack{display:flex;display:-webkit-flex;flex-direction:row;-webkit-flex-direction:row;align-items:center;-webkit-align-items:center;align-self:stretch;-webkit-align-self:stretch}.vstack{display:flex;display:-webkit-flex;flex:1 1 auto;-webkit-flex:1 1 auto;flex-direction:column;-webkit-flex-direction:column;align-self:stretch;-webkit-align-self:stretch}.visually-hidden,.visually-hidden-focusable:not(:focus):not(:focus-within){width:1px !important;height:1px !important;padding:0 !important;margin:-1px !important;overflow:hidden !important;clip:rect(0, 0, 0, 0) !important;white-space:nowrap !important;border:0 !important}.visually-hidden:not(caption),.visually-hidden-focusable:not(:focus):not(:focus-within):not(caption){position:absolute !important}.stretched-link::after{position:absolute;top:0;right:0;bottom:0;left:0;z-index:1;content:""}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.vr{display:inline-block;align-self:stretch;-webkit-align-self:stretch;width:1px;min-height:1em;background-color:currentcolor;opacity:.25}.align-baseline{vertical-align:baseline !important}.align-top{vertical-align:top !important}.align-middle{vertical-align:middle !important}.align-bottom{vertical-align:bottom !important}.align-text-bottom{vertical-align:text-bottom !important}.align-text-top{vertical-align:text-top !important}.float-start{float:left !important}.float-end{float:right !important}.float-none{float:none !important}.object-fit-contain{object-fit:contain !important}.object-fit-cover{object-fit:cover !important}.object-fit-fill{object-fit:fill !important}.object-fit-scale{object-fit:scale-down !important}.object-fit-none{object-fit:none !important}.opacity-0{opacity:0 !important}.opacity-25{opacity:.25 
!important}.opacity-50{opacity:.5 !important}.opacity-75{opacity:.75 !important}.opacity-100{opacity:1 !important}.overflow-auto{overflow:auto !important}.overflow-hidden{overflow:hidden !important}.overflow-visible{overflow:visible !important}.overflow-scroll{overflow:scroll !important}.overflow-x-auto{overflow-x:auto !important}.overflow-x-hidden{overflow-x:hidden !important}.overflow-x-visible{overflow-x:visible !important}.overflow-x-scroll{overflow-x:scroll !important}.overflow-y-auto{overflow-y:auto !important}.overflow-y-hidden{overflow-y:hidden !important}.overflow-y-visible{overflow-y:visible !important}.overflow-y-scroll{overflow-y:scroll !important}.d-inline{display:inline !important}.d-inline-block{display:inline-block !important}.d-block{display:block !important}.d-grid{display:grid !important}.d-inline-grid{display:inline-grid !important}.d-table{display:table !important}.d-table-row{display:table-row !important}.d-table-cell{display:table-cell !important}.d-flex{display:flex !important}.d-inline-flex{display:inline-flex !important}.d-none{display:none !important}.shadow{box-shadow:0 .5rem 1rem rgba(0,0,0,.15) !important}.shadow-sm{box-shadow:0 .125rem .25rem rgba(0,0,0,.075) !important}.shadow-lg{box-shadow:0 1rem 3rem rgba(0,0,0,.175) !important}.shadow-none{box-shadow:none !important}.focus-ring-default{--bs-focus-ring-color: rgba(var(--bs-default-rgb), var(--bs-focus-ring-opacity))}.focus-ring-primary{--bs-focus-ring-color: rgba(var(--bs-primary-rgb), var(--bs-focus-ring-opacity))}.focus-ring-secondary{--bs-focus-ring-color: rgba(var(--bs-secondary-rgb), var(--bs-focus-ring-opacity))}.focus-ring-success{--bs-focus-ring-color: rgba(var(--bs-success-rgb), var(--bs-focus-ring-opacity))}.focus-ring-info{--bs-focus-ring-color: rgba(var(--bs-info-rgb), var(--bs-focus-ring-opacity))}.focus-ring-warning{--bs-focus-ring-color: rgba(var(--bs-warning-rgb), var(--bs-focus-ring-opacity))}.focus-ring-danger{--bs-focus-ring-color: rgba(var(--bs-danger-rgb), 
var(--bs-focus-ring-opacity))}.focus-ring-light{--bs-focus-ring-color: rgba(var(--bs-light-rgb), var(--bs-focus-ring-opacity))}.focus-ring-dark{--bs-focus-ring-color: rgba(var(--bs-dark-rgb), var(--bs-focus-ring-opacity))}.position-static{position:static !important}.position-relative{position:relative !important}.position-absolute{position:absolute !important}.position-fixed{position:fixed !important}.position-sticky{position:sticky !important}.top-0{top:0 !important}.top-50{top:50% !important}.top-100{top:100% !important}.bottom-0{bottom:0 !important}.bottom-50{bottom:50% !important}.bottom-100{bottom:100% !important}.start-0{left:0 !important}.start-50{left:50% !important}.start-100{left:100% !important}.end-0{right:0 !important}.end-50{right:50% !important}.end-100{right:100% !important}.translate-middle{transform:translate(-50%, -50%) !important}.translate-middle-x{transform:translateX(-50%) !important}.translate-middle-y{transform:translateY(-50%) !important}.border{border:var(--bs-border-width) var(--bs-border-style) var(--bs-border-color) !important}.border-0{border:0 !important}.border-top{border-top:var(--bs-border-width) var(--bs-border-style) var(--bs-border-color) !important}.border-top-0{border-top:0 !important}.border-end{border-right:var(--bs-border-width) var(--bs-border-style) var(--bs-border-color) !important}.border-end-0{border-right:0 !important}.border-bottom{border-bottom:var(--bs-border-width) var(--bs-border-style) var(--bs-border-color) !important}.border-bottom-0{border-bottom:0 !important}.border-start{border-left:var(--bs-border-width) var(--bs-border-style) var(--bs-border-color) !important}.border-start-0{border-left:0 !important}.border-default{--bs-border-opacity: 1;border-color:rgba(var(--bs-default-rgb), var(--bs-border-opacity)) !important}.border-primary{--bs-border-opacity: 1;border-color:rgba(var(--bs-primary-rgb), var(--bs-border-opacity)) !important}.border-secondary{--bs-border-opacity: 
1;border-color:rgba(var(--bs-secondary-rgb), var(--bs-border-opacity)) !important}.border-success{--bs-border-opacity: 1;border-color:rgba(var(--bs-success-rgb), var(--bs-border-opacity)) !important}.border-info{--bs-border-opacity: 1;border-color:rgba(var(--bs-info-rgb), var(--bs-border-opacity)) !important}.border-warning{--bs-border-opacity: 1;border-color:rgba(var(--bs-warning-rgb), var(--bs-border-opacity)) !important}.border-danger{--bs-border-opacity: 1;border-color:rgba(var(--bs-danger-rgb), var(--bs-border-opacity)) !important}.border-light{--bs-border-opacity: 1;border-color:rgba(var(--bs-light-rgb), var(--bs-border-opacity)) !important}.border-dark{--bs-border-opacity: 1;border-color:rgba(var(--bs-dark-rgb), var(--bs-border-opacity)) !important}.border-black{--bs-border-opacity: 1;border-color:rgba(var(--bs-black-rgb), var(--bs-border-opacity)) !important}.border-white{--bs-border-opacity: 1;border-color:rgba(var(--bs-white-rgb), var(--bs-border-opacity)) !important}.border-primary-subtle{border-color:var(--bs-primary-border-subtle) !important}.border-secondary-subtle{border-color:var(--bs-secondary-border-subtle) !important}.border-success-subtle{border-color:var(--bs-success-border-subtle) !important}.border-info-subtle{border-color:var(--bs-info-border-subtle) !important}.border-warning-subtle{border-color:var(--bs-warning-border-subtle) !important}.border-danger-subtle{border-color:var(--bs-danger-border-subtle) !important}.border-light-subtle{border-color:var(--bs-light-border-subtle) !important}.border-dark-subtle{border-color:var(--bs-dark-border-subtle) !important}.border-1{border-width:1px !important}.border-2{border-width:2px !important}.border-3{border-width:3px !important}.border-4{border-width:4px !important}.border-5{border-width:5px !important}.border-opacity-10{--bs-border-opacity: 0.1}.border-opacity-25{--bs-border-opacity: 0.25}.border-opacity-50{--bs-border-opacity: 0.5}.border-opacity-75{--bs-border-opacity: 
0.75}.border-opacity-100{--bs-border-opacity: 1}.w-25{width:25% !important}.w-50{width:50% !important}.w-75{width:75% !important}.w-100{width:100% !important}.w-auto{width:auto !important}.mw-100{max-width:100% !important}.vw-100{width:100vw !important}.min-vw-100{min-width:100vw !important}.h-25{height:25% !important}.h-50{height:50% !important}.h-75{height:75% !important}.h-100{height:100% !important}.h-auto{height:auto !important}.mh-100{max-height:100% !important}.vh-100{height:100vh !important}.min-vh-100{min-height:100vh !important}.flex-fill{flex:1 1 auto !important}.flex-row{flex-direction:row !important}.flex-column{flex-direction:column !important}.flex-row-reverse{flex-direction:row-reverse !important}.flex-column-reverse{flex-direction:column-reverse !important}.flex-grow-0{flex-grow:0 !important}.flex-grow-1{flex-grow:1 !important}.flex-shrink-0{flex-shrink:0 !important}.flex-shrink-1{flex-shrink:1 !important}.flex-wrap{flex-wrap:wrap !important}.flex-nowrap{flex-wrap:nowrap !important}.flex-wrap-reverse{flex-wrap:wrap-reverse !important}.justify-content-start{justify-content:flex-start !important}.justify-content-end{justify-content:flex-end !important}.justify-content-center{justify-content:center !important}.justify-content-between{justify-content:space-between !important}.justify-content-around{justify-content:space-around !important}.justify-content-evenly{justify-content:space-evenly !important}.align-items-start{align-items:flex-start !important}.align-items-end{align-items:flex-end !important}.align-items-center{align-items:center !important}.align-items-baseline{align-items:baseline !important}.align-items-stretch{align-items:stretch !important}.align-content-start{align-content:flex-start !important}.align-content-end{align-content:flex-end !important}.align-content-center{align-content:center !important}.align-content-between{align-content:space-between !important}.align-content-around{align-content:space-around 
!important}.align-content-stretch{align-content:stretch !important}.align-self-auto{align-self:auto !important}.align-self-start{align-self:flex-start !important}.align-self-end{align-self:flex-end !important}.align-self-center{align-self:center !important}.align-self-baseline{align-self:baseline !important}.align-self-stretch{align-self:stretch !important}.order-first{order:-1 !important}.order-0{order:0 !important}.order-1{order:1 !important}.order-2{order:2 !important}.order-3{order:3 !important}.order-4{order:4 !important}.order-5{order:5 !important}.order-last{order:6 !important}.m-0{margin:0 !important}.m-1{margin:.25rem !important}.m-2{margin:.5rem !important}.m-3{margin:1rem !important}.m-4{margin:1.5rem !important}.m-5{margin:3rem !important}.m-auto{margin:auto !important}.mx-0{margin-right:0 !important;margin-left:0 !important}.mx-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-3{margin-right:1rem !important;margin-left:1rem !important}.mx-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-5{margin-right:3rem !important;margin-left:3rem !important}.mx-auto{margin-right:auto !important;margin-left:auto !important}.my-0{margin-top:0 !important;margin-bottom:0 !important}.my-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-0{margin-top:0 !important}.mt-1{margin-top:.25rem !important}.mt-2{margin-top:.5rem !important}.mt-3{margin-top:1rem !important}.mt-4{margin-top:1.5rem !important}.mt-5{margin-top:3rem !important}.mt-auto{margin-top:auto !important}.me-0{margin-right:0 !important}.me-1{margin-right:.25rem 
!important}.me-2{margin-right:.5rem !important}.me-3{margin-right:1rem !important}.me-4{margin-right:1.5rem !important}.me-5{margin-right:3rem !important}.me-auto{margin-right:auto !important}.mb-0{margin-bottom:0 !important}.mb-1{margin-bottom:.25rem !important}.mb-2{margin-bottom:.5rem !important}.mb-3{margin-bottom:1rem !important}.mb-4{margin-bottom:1.5rem !important}.mb-5{margin-bottom:3rem !important}.mb-auto{margin-bottom:auto !important}.ms-0{margin-left:0 !important}.ms-1{margin-left:.25rem !important}.ms-2{margin-left:.5rem !important}.ms-3{margin-left:1rem !important}.ms-4{margin-left:1.5rem !important}.ms-5{margin-left:3rem !important}.ms-auto{margin-left:auto !important}.p-0{padding:0 !important}.p-1{padding:.25rem !important}.p-2{padding:.5rem !important}.p-3{padding:1rem !important}.p-4{padding:1.5rem !important}.p-5{padding:3rem !important}.px-0{padding-right:0 !important;padding-left:0 !important}.px-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-3{padding-right:1rem !important;padding-left:1rem !important}.px-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-5{padding-right:3rem !important;padding-left:3rem !important}.py-0{padding-top:0 !important;padding-bottom:0 !important}.py-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-0{padding-top:0 !important}.pt-1{padding-top:.25rem !important}.pt-2{padding-top:.5rem !important}.pt-3{padding-top:1rem !important}.pt-4{padding-top:1.5rem !important}.pt-5{padding-top:3rem !important}.pe-0{padding-right:0 !important}.pe-1{padding-right:.25rem !important}.pe-2{padding-right:.5rem !important}.pe-3{padding-right:1rem 
!important}.pe-4{padding-right:1.5rem !important}.pe-5{padding-right:3rem !important}.pb-0{padding-bottom:0 !important}.pb-1{padding-bottom:.25rem !important}.pb-2{padding-bottom:.5rem !important}.pb-3{padding-bottom:1rem !important}.pb-4{padding-bottom:1.5rem !important}.pb-5{padding-bottom:3rem !important}.ps-0{padding-left:0 !important}.ps-1{padding-left:.25rem !important}.ps-2{padding-left:.5rem !important}.ps-3{padding-left:1rem !important}.ps-4{padding-left:1.5rem !important}.ps-5{padding-left:3rem !important}.gap-0{gap:0 !important}.gap-1{gap:.25rem !important}.gap-2{gap:.5rem !important}.gap-3{gap:1rem !important}.gap-4{gap:1.5rem !important}.gap-5{gap:3rem !important}.row-gap-0{row-gap:0 !important}.row-gap-1{row-gap:.25rem !important}.row-gap-2{row-gap:.5rem !important}.row-gap-3{row-gap:1rem !important}.row-gap-4{row-gap:1.5rem !important}.row-gap-5{row-gap:3rem !important}.column-gap-0{column-gap:0 !important}.column-gap-1{column-gap:.25rem !important}.column-gap-2{column-gap:.5rem !important}.column-gap-3{column-gap:1rem !important}.column-gap-4{column-gap:1.5rem !important}.column-gap-5{column-gap:3rem !important}.font-monospace{font-family:var(--bs-font-monospace) !important}.fs-1{font-size:calc(1.325rem + 0.9vw) !important}.fs-2{font-size:calc(1.29rem + 0.48vw) !important}.fs-3{font-size:calc(1.27rem + 0.24vw) !important}.fs-4{font-size:1.25rem !important}.fs-5{font-size:1.1rem !important}.fs-6{font-size:1rem !important}.fst-italic{font-style:italic !important}.fst-normal{font-style:normal !important}.fw-lighter{font-weight:lighter !important}.fw-light{font-weight:300 !important}.fw-normal{font-weight:400 !important}.fw-medium{font-weight:500 !important}.fw-semibold{font-weight:600 !important}.fw-bold{font-weight:700 !important}.fw-bolder{font-weight:bolder !important}.lh-1{line-height:1 !important}.lh-sm{line-height:1.25 !important}.lh-base{line-height:1.5 !important}.lh-lg{line-height:2 !important}.text-start{text-align:left 
!important}.text-end{text-align:right !important}.text-center{text-align:center !important}.text-decoration-none{text-decoration:none !important}.text-decoration-underline{text-decoration:underline !important}.text-decoration-line-through{text-decoration:line-through !important}.text-lowercase{text-transform:lowercase !important}.text-uppercase{text-transform:uppercase !important}.text-capitalize{text-transform:capitalize !important}.text-wrap{white-space:normal !important}.text-nowrap{white-space:nowrap !important}.text-break{word-wrap:break-word !important;word-break:break-word !important}.text-default{--bs-text-opacity: 1;color:rgba(var(--bs-default-rgb), var(--bs-text-opacity)) !important}.text-primary{--bs-text-opacity: 1;color:rgba(var(--bs-primary-rgb), var(--bs-text-opacity)) !important}.text-secondary{--bs-text-opacity: 1;color:rgba(var(--bs-secondary-rgb), var(--bs-text-opacity)) !important}.text-success{--bs-text-opacity: 1;color:rgba(var(--bs-success-rgb), var(--bs-text-opacity)) !important}.text-info{--bs-text-opacity: 1;color:rgba(var(--bs-info-rgb), var(--bs-text-opacity)) !important}.text-warning{--bs-text-opacity: 1;color:rgba(var(--bs-warning-rgb), var(--bs-text-opacity)) !important}.text-danger{--bs-text-opacity: 1;color:rgba(var(--bs-danger-rgb), var(--bs-text-opacity)) !important}.text-light{--bs-text-opacity: 1;color:rgba(var(--bs-light-rgb), var(--bs-text-opacity)) !important}.text-dark{--bs-text-opacity: 1;color:rgba(var(--bs-dark-rgb), var(--bs-text-opacity)) !important}.text-black{--bs-text-opacity: 1;color:rgba(var(--bs-black-rgb), var(--bs-text-opacity)) !important}.text-white{--bs-text-opacity: 1;color:rgba(var(--bs-white-rgb), var(--bs-text-opacity)) !important}.text-body{--bs-text-opacity: 1;color:rgba(var(--bs-body-color-rgb), var(--bs-text-opacity)) !important}.text-muted{--bs-text-opacity: 1;color:var(--bs-secondary-color) !important}.text-black-50{--bs-text-opacity: 1;color:rgba(0,0,0,.5) 
!important}.text-white-50{--bs-text-opacity: 1;color:rgba(255,255,255,.5) !important}.text-body-secondary{--bs-text-opacity: 1;color:var(--bs-secondary-color) !important}.text-body-tertiary{--bs-text-opacity: 1;color:var(--bs-tertiary-color) !important}.text-body-emphasis{--bs-text-opacity: 1;color:var(--bs-emphasis-color) !important}.text-reset{--bs-text-opacity: 1;color:inherit !important}.text-opacity-25{--bs-text-opacity: 0.25}.text-opacity-50{--bs-text-opacity: 0.5}.text-opacity-75{--bs-text-opacity: 0.75}.text-opacity-100{--bs-text-opacity: 1}.text-primary-emphasis{color:var(--bs-primary-text-emphasis) !important}.text-secondary-emphasis{color:var(--bs-secondary-text-emphasis) !important}.text-success-emphasis{color:var(--bs-success-text-emphasis) !important}.text-info-emphasis{color:var(--bs-info-text-emphasis) !important}.text-warning-emphasis{color:var(--bs-warning-text-emphasis) !important}.text-danger-emphasis{color:var(--bs-danger-text-emphasis) !important}.text-light-emphasis{color:var(--bs-light-text-emphasis) !important}.text-dark-emphasis{color:var(--bs-dark-text-emphasis) !important}.link-opacity-10{--bs-link-opacity: 0.1}.link-opacity-10-hover:hover{--bs-link-opacity: 0.1}.link-opacity-25{--bs-link-opacity: 0.25}.link-opacity-25-hover:hover{--bs-link-opacity: 0.25}.link-opacity-50{--bs-link-opacity: 0.5}.link-opacity-50-hover:hover{--bs-link-opacity: 0.5}.link-opacity-75{--bs-link-opacity: 0.75}.link-opacity-75-hover:hover{--bs-link-opacity: 0.75}.link-opacity-100{--bs-link-opacity: 1}.link-opacity-100-hover:hover{--bs-link-opacity: 1}.link-offset-1{text-underline-offset:.125em !important}.link-offset-1-hover:hover{text-underline-offset:.125em !important}.link-offset-2{text-underline-offset:.25em !important}.link-offset-2-hover:hover{text-underline-offset:.25em !important}.link-offset-3{text-underline-offset:.375em !important}.link-offset-3-hover:hover{text-underline-offset:.375em !important}.link-underline-default{--bs-link-underline-opacity: 
1;text-decoration-color:rgba(var(--bs-default-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-primary{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-primary-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-secondary{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-secondary-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-success{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-success-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-info{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-info-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-warning{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-warning-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-danger{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-danger-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-light{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-light-rgb), var(--bs-link-underline-opacity)) !important}.link-underline-dark{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-dark-rgb), var(--bs-link-underline-opacity)) !important}.link-underline{--bs-link-underline-opacity: 1;text-decoration-color:rgba(var(--bs-link-color-rgb), var(--bs-link-underline-opacity, 1)) !important}.link-underline-opacity-0{--bs-link-underline-opacity: 0}.link-underline-opacity-0-hover:hover{--bs-link-underline-opacity: 0}.link-underline-opacity-10{--bs-link-underline-opacity: 0.1}.link-underline-opacity-10-hover:hover{--bs-link-underline-opacity: 0.1}.link-underline-opacity-25{--bs-link-underline-opacity: 0.25}.link-underline-opacity-25-hover:hover{--bs-link-underline-opacity: 0.25}.link-underline-opacity-50{--bs-link-underline-opacity: 0.5}.link-underline-opacity-50-hover:hover{--bs-link-underline-opacity: 
0.5}.link-underline-opacity-75{--bs-link-underline-opacity: 0.75}.link-underline-opacity-75-hover:hover{--bs-link-underline-opacity: 0.75}.link-underline-opacity-100{--bs-link-underline-opacity: 1}.link-underline-opacity-100-hover:hover{--bs-link-underline-opacity: 1}.bg-default{--bs-bg-opacity: 1;background-color:rgba(var(--bs-default-rgb), var(--bs-bg-opacity)) !important}.bg-primary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-primary-rgb), var(--bs-bg-opacity)) !important}.bg-secondary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-secondary-rgb), var(--bs-bg-opacity)) !important}.bg-success{--bs-bg-opacity: 1;background-color:rgba(var(--bs-success-rgb), var(--bs-bg-opacity)) !important}.bg-info{--bs-bg-opacity: 1;background-color:rgba(var(--bs-info-rgb), var(--bs-bg-opacity)) !important}.bg-warning{--bs-bg-opacity: 1;background-color:rgba(var(--bs-warning-rgb), var(--bs-bg-opacity)) !important}.bg-danger{--bs-bg-opacity: 1;background-color:rgba(var(--bs-danger-rgb), var(--bs-bg-opacity)) !important}.bg-light{--bs-bg-opacity: 1;background-color:rgba(var(--bs-light-rgb), var(--bs-bg-opacity)) !important}.bg-dark{--bs-bg-opacity: 1;background-color:rgba(var(--bs-dark-rgb), var(--bs-bg-opacity)) !important}.bg-black{--bs-bg-opacity: 1;background-color:rgba(var(--bs-black-rgb), var(--bs-bg-opacity)) !important}.bg-white{--bs-bg-opacity: 1;background-color:rgba(var(--bs-white-rgb), var(--bs-bg-opacity)) !important}.bg-body{--bs-bg-opacity: 1;background-color:rgba(var(--bs-body-bg-rgb), var(--bs-bg-opacity)) !important}.bg-transparent{--bs-bg-opacity: 1;background-color:rgba(0,0,0,0) !important}.bg-body-secondary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-secondary-bg-rgb), var(--bs-bg-opacity)) !important}.bg-body-tertiary{--bs-bg-opacity: 1;background-color:rgba(var(--bs-tertiary-bg-rgb), var(--bs-bg-opacity)) !important}.bg-opacity-10{--bs-bg-opacity: 0.1}.bg-opacity-25{--bs-bg-opacity: 0.25}.bg-opacity-50{--bs-bg-opacity: 
0.5}.bg-opacity-75{--bs-bg-opacity: 0.75}.bg-opacity-100{--bs-bg-opacity: 1}.bg-primary-subtle{background-color:var(--bs-primary-bg-subtle) !important}.bg-secondary-subtle{background-color:var(--bs-secondary-bg-subtle) !important}.bg-success-subtle{background-color:var(--bs-success-bg-subtle) !important}.bg-info-subtle{background-color:var(--bs-info-bg-subtle) !important}.bg-warning-subtle{background-color:var(--bs-warning-bg-subtle) !important}.bg-danger-subtle{background-color:var(--bs-danger-bg-subtle) !important}.bg-light-subtle{background-color:var(--bs-light-bg-subtle) !important}.bg-dark-subtle{background-color:var(--bs-dark-bg-subtle) !important}.bg-gradient{background-image:var(--bs-gradient) !important}.user-select-all{user-select:all !important}.user-select-auto{user-select:auto !important}.user-select-none{user-select:none !important}.pe-none{pointer-events:none !important}.pe-auto{pointer-events:auto !important}.rounded{border-radius:var(--bs-border-radius) !important}.rounded-0{border-radius:0 !important}.rounded-1{border-radius:var(--bs-border-radius-sm) !important}.rounded-2{border-radius:var(--bs-border-radius) !important}.rounded-3{border-radius:var(--bs-border-radius-lg) !important}.rounded-4{border-radius:var(--bs-border-radius-xl) !important}.rounded-5{border-radius:var(--bs-border-radius-xxl) !important}.rounded-circle{border-radius:50% !important}.rounded-pill{border-radius:var(--bs-border-radius-pill) !important}.rounded-top{border-top-left-radius:var(--bs-border-radius) !important;border-top-right-radius:var(--bs-border-radius) !important}.rounded-top-0{border-top-left-radius:0 !important;border-top-right-radius:0 !important}.rounded-top-1{border-top-left-radius:var(--bs-border-radius-sm) !important;border-top-right-radius:var(--bs-border-radius-sm) !important}.rounded-top-2{border-top-left-radius:var(--bs-border-radius) !important;border-top-right-radius:var(--bs-border-radius) 
!important}.rounded-top-3{border-top-left-radius:var(--bs-border-radius-lg) !important;border-top-right-radius:var(--bs-border-radius-lg) !important}.rounded-top-4{border-top-left-radius:var(--bs-border-radius-xl) !important;border-top-right-radius:var(--bs-border-radius-xl) !important}.rounded-top-5{border-top-left-radius:var(--bs-border-radius-xxl) !important;border-top-right-radius:var(--bs-border-radius-xxl) !important}.rounded-top-circle{border-top-left-radius:50% !important;border-top-right-radius:50% !important}.rounded-top-pill{border-top-left-radius:var(--bs-border-radius-pill) !important;border-top-right-radius:var(--bs-border-radius-pill) !important}.rounded-end{border-top-right-radius:var(--bs-border-radius) !important;border-bottom-right-radius:var(--bs-border-radius) !important}.rounded-end-0{border-top-right-radius:0 !important;border-bottom-right-radius:0 !important}.rounded-end-1{border-top-right-radius:var(--bs-border-radius-sm) !important;border-bottom-right-radius:var(--bs-border-radius-sm) !important}.rounded-end-2{border-top-right-radius:var(--bs-border-radius) !important;border-bottom-right-radius:var(--bs-border-radius) !important}.rounded-end-3{border-top-right-radius:var(--bs-border-radius-lg) !important;border-bottom-right-radius:var(--bs-border-radius-lg) !important}.rounded-end-4{border-top-right-radius:var(--bs-border-radius-xl) !important;border-bottom-right-radius:var(--bs-border-radius-xl) !important}.rounded-end-5{border-top-right-radius:var(--bs-border-radius-xxl) !important;border-bottom-right-radius:var(--bs-border-radius-xxl) !important}.rounded-end-circle{border-top-right-radius:50% !important;border-bottom-right-radius:50% !important}.rounded-end-pill{border-top-right-radius:var(--bs-border-radius-pill) !important;border-bottom-right-radius:var(--bs-border-radius-pill) !important}.rounded-bottom{border-bottom-right-radius:var(--bs-border-radius) !important;border-bottom-left-radius:var(--bs-border-radius) 
!important}.rounded-bottom-0{border-bottom-right-radius:0 !important;border-bottom-left-radius:0 !important}.rounded-bottom-1{border-bottom-right-radius:var(--bs-border-radius-sm) !important;border-bottom-left-radius:var(--bs-border-radius-sm) !important}.rounded-bottom-2{border-bottom-right-radius:var(--bs-border-radius) !important;border-bottom-left-radius:var(--bs-border-radius) !important}.rounded-bottom-3{border-bottom-right-radius:var(--bs-border-radius-lg) !important;border-bottom-left-radius:var(--bs-border-radius-lg) !important}.rounded-bottom-4{border-bottom-right-radius:var(--bs-border-radius-xl) !important;border-bottom-left-radius:var(--bs-border-radius-xl) !important}.rounded-bottom-5{border-bottom-right-radius:var(--bs-border-radius-xxl) !important;border-bottom-left-radius:var(--bs-border-radius-xxl) !important}.rounded-bottom-circle{border-bottom-right-radius:50% !important;border-bottom-left-radius:50% !important}.rounded-bottom-pill{border-bottom-right-radius:var(--bs-border-radius-pill) !important;border-bottom-left-radius:var(--bs-border-radius-pill) !important}.rounded-start{border-bottom-left-radius:var(--bs-border-radius) !important;border-top-left-radius:var(--bs-border-radius) !important}.rounded-start-0{border-bottom-left-radius:0 !important;border-top-left-radius:0 !important}.rounded-start-1{border-bottom-left-radius:var(--bs-border-radius-sm) !important;border-top-left-radius:var(--bs-border-radius-sm) !important}.rounded-start-2{border-bottom-left-radius:var(--bs-border-radius) !important;border-top-left-radius:var(--bs-border-radius) !important}.rounded-start-3{border-bottom-left-radius:var(--bs-border-radius-lg) !important;border-top-left-radius:var(--bs-border-radius-lg) !important}.rounded-start-4{border-bottom-left-radius:var(--bs-border-radius-xl) !important;border-top-left-radius:var(--bs-border-radius-xl) !important}.rounded-start-5{border-bottom-left-radius:var(--bs-border-radius-xxl) 
!important;border-top-left-radius:var(--bs-border-radius-xxl) !important}.rounded-start-circle{border-bottom-left-radius:50% !important;border-top-left-radius:50% !important}.rounded-start-pill{border-bottom-left-radius:var(--bs-border-radius-pill) !important;border-top-left-radius:var(--bs-border-radius-pill) !important}.visible{visibility:visible !important}.invisible{visibility:hidden !important}.z-n1{z-index:-1 !important}.z-0{z-index:0 !important}.z-1{z-index:1 !important}.z-2{z-index:2 !important}.z-3{z-index:3 !important}@media(min-width: 576px){.float-sm-start{float:left !important}.float-sm-end{float:right !important}.float-sm-none{float:none !important}.object-fit-sm-contain{object-fit:contain !important}.object-fit-sm-cover{object-fit:cover !important}.object-fit-sm-fill{object-fit:fill !important}.object-fit-sm-scale{object-fit:scale-down !important}.object-fit-sm-none{object-fit:none !important}.d-sm-inline{display:inline !important}.d-sm-inline-block{display:inline-block !important}.d-sm-block{display:block !important}.d-sm-grid{display:grid !important}.d-sm-inline-grid{display:inline-grid !important}.d-sm-table{display:table !important}.d-sm-table-row{display:table-row !important}.d-sm-table-cell{display:table-cell !important}.d-sm-flex{display:flex !important}.d-sm-inline-flex{display:inline-flex !important}.d-sm-none{display:none !important}.flex-sm-fill{flex:1 1 auto !important}.flex-sm-row{flex-direction:row !important}.flex-sm-column{flex-direction:column !important}.flex-sm-row-reverse{flex-direction:row-reverse !important}.flex-sm-column-reverse{flex-direction:column-reverse !important}.flex-sm-grow-0{flex-grow:0 !important}.flex-sm-grow-1{flex-grow:1 !important}.flex-sm-shrink-0{flex-shrink:0 !important}.flex-sm-shrink-1{flex-shrink:1 !important}.flex-sm-wrap{flex-wrap:wrap !important}.flex-sm-nowrap{flex-wrap:nowrap !important}.flex-sm-wrap-reverse{flex-wrap:wrap-reverse !important}.justify-content-sm-start{justify-content:flex-start 
!important}.justify-content-sm-end{justify-content:flex-end !important}.justify-content-sm-center{justify-content:center !important}.justify-content-sm-between{justify-content:space-between !important}.justify-content-sm-around{justify-content:space-around !important}.justify-content-sm-evenly{justify-content:space-evenly !important}.align-items-sm-start{align-items:flex-start !important}.align-items-sm-end{align-items:flex-end !important}.align-items-sm-center{align-items:center !important}.align-items-sm-baseline{align-items:baseline !important}.align-items-sm-stretch{align-items:stretch !important}.align-content-sm-start{align-content:flex-start !important}.align-content-sm-end{align-content:flex-end !important}.align-content-sm-center{align-content:center !important}.align-content-sm-between{align-content:space-between !important}.align-content-sm-around{align-content:space-around !important}.align-content-sm-stretch{align-content:stretch !important}.align-self-sm-auto{align-self:auto !important}.align-self-sm-start{align-self:flex-start !important}.align-self-sm-end{align-self:flex-end !important}.align-self-sm-center{align-self:center !important}.align-self-sm-baseline{align-self:baseline !important}.align-self-sm-stretch{align-self:stretch !important}.order-sm-first{order:-1 !important}.order-sm-0{order:0 !important}.order-sm-1{order:1 !important}.order-sm-2{order:2 !important}.order-sm-3{order:3 !important}.order-sm-4{order:4 !important}.order-sm-5{order:5 !important}.order-sm-last{order:6 !important}.m-sm-0{margin:0 !important}.m-sm-1{margin:.25rem !important}.m-sm-2{margin:.5rem !important}.m-sm-3{margin:1rem !important}.m-sm-4{margin:1.5rem !important}.m-sm-5{margin:3rem !important}.m-sm-auto{margin:auto !important}.mx-sm-0{margin-right:0 !important;margin-left:0 !important}.mx-sm-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-sm-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-sm-3{margin-right:1rem 
!important;margin-left:1rem !important}.mx-sm-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-sm-5{margin-right:3rem !important;margin-left:3rem !important}.mx-sm-auto{margin-right:auto !important;margin-left:auto !important}.my-sm-0{margin-top:0 !important;margin-bottom:0 !important}.my-sm-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-sm-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-sm-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-sm-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-sm-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-sm-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-sm-0{margin-top:0 !important}.mt-sm-1{margin-top:.25rem !important}.mt-sm-2{margin-top:.5rem !important}.mt-sm-3{margin-top:1rem !important}.mt-sm-4{margin-top:1.5rem !important}.mt-sm-5{margin-top:3rem !important}.mt-sm-auto{margin-top:auto !important}.me-sm-0{margin-right:0 !important}.me-sm-1{margin-right:.25rem !important}.me-sm-2{margin-right:.5rem !important}.me-sm-3{margin-right:1rem !important}.me-sm-4{margin-right:1.5rem !important}.me-sm-5{margin-right:3rem !important}.me-sm-auto{margin-right:auto !important}.mb-sm-0{margin-bottom:0 !important}.mb-sm-1{margin-bottom:.25rem !important}.mb-sm-2{margin-bottom:.5rem !important}.mb-sm-3{margin-bottom:1rem !important}.mb-sm-4{margin-bottom:1.5rem !important}.mb-sm-5{margin-bottom:3rem !important}.mb-sm-auto{margin-bottom:auto !important}.ms-sm-0{margin-left:0 !important}.ms-sm-1{margin-left:.25rem !important}.ms-sm-2{margin-left:.5rem !important}.ms-sm-3{margin-left:1rem !important}.ms-sm-4{margin-left:1.5rem !important}.ms-sm-5{margin-left:3rem !important}.ms-sm-auto{margin-left:auto !important}.p-sm-0{padding:0 !important}.p-sm-1{padding:.25rem !important}.p-sm-2{padding:.5rem !important}.p-sm-3{padding:1rem !important}.p-sm-4{padding:1.5rem !important}.p-sm-5{padding:3rem 
!important}.px-sm-0{padding-right:0 !important;padding-left:0 !important}.px-sm-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-sm-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-sm-3{padding-right:1rem !important;padding-left:1rem !important}.px-sm-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-sm-5{padding-right:3rem !important;padding-left:3rem !important}.py-sm-0{padding-top:0 !important;padding-bottom:0 !important}.py-sm-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-sm-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-sm-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-sm-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-sm-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-sm-0{padding-top:0 !important}.pt-sm-1{padding-top:.25rem !important}.pt-sm-2{padding-top:.5rem !important}.pt-sm-3{padding-top:1rem !important}.pt-sm-4{padding-top:1.5rem !important}.pt-sm-5{padding-top:3rem !important}.pe-sm-0{padding-right:0 !important}.pe-sm-1{padding-right:.25rem !important}.pe-sm-2{padding-right:.5rem !important}.pe-sm-3{padding-right:1rem !important}.pe-sm-4{padding-right:1.5rem !important}.pe-sm-5{padding-right:3rem !important}.pb-sm-0{padding-bottom:0 !important}.pb-sm-1{padding-bottom:.25rem !important}.pb-sm-2{padding-bottom:.5rem !important}.pb-sm-3{padding-bottom:1rem !important}.pb-sm-4{padding-bottom:1.5rem !important}.pb-sm-5{padding-bottom:3rem !important}.ps-sm-0{padding-left:0 !important}.ps-sm-1{padding-left:.25rem !important}.ps-sm-2{padding-left:.5rem !important}.ps-sm-3{padding-left:1rem !important}.ps-sm-4{padding-left:1.5rem !important}.ps-sm-5{padding-left:3rem !important}.gap-sm-0{gap:0 !important}.gap-sm-1{gap:.25rem !important}.gap-sm-2{gap:.5rem !important}.gap-sm-3{gap:1rem !important}.gap-sm-4{gap:1.5rem !important}.gap-sm-5{gap:3rem !important}.row-gap-sm-0{row-gap:0 
!important}.row-gap-sm-1{row-gap:.25rem !important}.row-gap-sm-2{row-gap:.5rem !important}.row-gap-sm-3{row-gap:1rem !important}.row-gap-sm-4{row-gap:1.5rem !important}.row-gap-sm-5{row-gap:3rem !important}.column-gap-sm-0{column-gap:0 !important}.column-gap-sm-1{column-gap:.25rem !important}.column-gap-sm-2{column-gap:.5rem !important}.column-gap-sm-3{column-gap:1rem !important}.column-gap-sm-4{column-gap:1.5rem !important}.column-gap-sm-5{column-gap:3rem !important}.text-sm-start{text-align:left !important}.text-sm-end{text-align:right !important}.text-sm-center{text-align:center !important}}@media(min-width: 768px){.float-md-start{float:left !important}.float-md-end{float:right !important}.float-md-none{float:none !important}.object-fit-md-contain{object-fit:contain !important}.object-fit-md-cover{object-fit:cover !important}.object-fit-md-fill{object-fit:fill !important}.object-fit-md-scale{object-fit:scale-down !important}.object-fit-md-none{object-fit:none !important}.d-md-inline{display:inline !important}.d-md-inline-block{display:inline-block !important}.d-md-block{display:block !important}.d-md-grid{display:grid !important}.d-md-inline-grid{display:inline-grid !important}.d-md-table{display:table !important}.d-md-table-row{display:table-row !important}.d-md-table-cell{display:table-cell !important}.d-md-flex{display:flex !important}.d-md-inline-flex{display:inline-flex !important}.d-md-none{display:none !important}.flex-md-fill{flex:1 1 auto !important}.flex-md-row{flex-direction:row !important}.flex-md-column{flex-direction:column !important}.flex-md-row-reverse{flex-direction:row-reverse !important}.flex-md-column-reverse{flex-direction:column-reverse !important}.flex-md-grow-0{flex-grow:0 !important}.flex-md-grow-1{flex-grow:1 !important}.flex-md-shrink-0{flex-shrink:0 !important}.flex-md-shrink-1{flex-shrink:1 !important}.flex-md-wrap{flex-wrap:wrap !important}.flex-md-nowrap{flex-wrap:nowrap !important}.flex-md-wrap-reverse{flex-wrap:wrap-reverse 
!important}.justify-content-md-start{justify-content:flex-start !important}.justify-content-md-end{justify-content:flex-end !important}.justify-content-md-center{justify-content:center !important}.justify-content-md-between{justify-content:space-between !important}.justify-content-md-around{justify-content:space-around !important}.justify-content-md-evenly{justify-content:space-evenly !important}.align-items-md-start{align-items:flex-start !important}.align-items-md-end{align-items:flex-end !important}.align-items-md-center{align-items:center !important}.align-items-md-baseline{align-items:baseline !important}.align-items-md-stretch{align-items:stretch !important}.align-content-md-start{align-content:flex-start !important}.align-content-md-end{align-content:flex-end !important}.align-content-md-center{align-content:center !important}.align-content-md-between{align-content:space-between !important}.align-content-md-around{align-content:space-around !important}.align-content-md-stretch{align-content:stretch !important}.align-self-md-auto{align-self:auto !important}.align-self-md-start{align-self:flex-start !important}.align-self-md-end{align-self:flex-end !important}.align-self-md-center{align-self:center !important}.align-self-md-baseline{align-self:baseline !important}.align-self-md-stretch{align-self:stretch !important}.order-md-first{order:-1 !important}.order-md-0{order:0 !important}.order-md-1{order:1 !important}.order-md-2{order:2 !important}.order-md-3{order:3 !important}.order-md-4{order:4 !important}.order-md-5{order:5 !important}.order-md-last{order:6 !important}.m-md-0{margin:0 !important}.m-md-1{margin:.25rem !important}.m-md-2{margin:.5rem !important}.m-md-3{margin:1rem !important}.m-md-4{margin:1.5rem !important}.m-md-5{margin:3rem !important}.m-md-auto{margin:auto !important}.mx-md-0{margin-right:0 !important;margin-left:0 !important}.mx-md-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-md-2{margin-right:.5rem 
!important;margin-left:.5rem !important}.mx-md-3{margin-right:1rem !important;margin-left:1rem !important}.mx-md-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-md-5{margin-right:3rem !important;margin-left:3rem !important}.mx-md-auto{margin-right:auto !important;margin-left:auto !important}.my-md-0{margin-top:0 !important;margin-bottom:0 !important}.my-md-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-md-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-md-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-md-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-md-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-md-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-md-0{margin-top:0 !important}.mt-md-1{margin-top:.25rem !important}.mt-md-2{margin-top:.5rem !important}.mt-md-3{margin-top:1rem !important}.mt-md-4{margin-top:1.5rem !important}.mt-md-5{margin-top:3rem !important}.mt-md-auto{margin-top:auto !important}.me-md-0{margin-right:0 !important}.me-md-1{margin-right:.25rem !important}.me-md-2{margin-right:.5rem !important}.me-md-3{margin-right:1rem !important}.me-md-4{margin-right:1.5rem !important}.me-md-5{margin-right:3rem !important}.me-md-auto{margin-right:auto !important}.mb-md-0{margin-bottom:0 !important}.mb-md-1{margin-bottom:.25rem !important}.mb-md-2{margin-bottom:.5rem !important}.mb-md-3{margin-bottom:1rem !important}.mb-md-4{margin-bottom:1.5rem !important}.mb-md-5{margin-bottom:3rem !important}.mb-md-auto{margin-bottom:auto !important}.ms-md-0{margin-left:0 !important}.ms-md-1{margin-left:.25rem !important}.ms-md-2{margin-left:.5rem !important}.ms-md-3{margin-left:1rem !important}.ms-md-4{margin-left:1.5rem !important}.ms-md-5{margin-left:3rem !important}.ms-md-auto{margin-left:auto !important}.p-md-0{padding:0 !important}.p-md-1{padding:.25rem !important}.p-md-2{padding:.5rem !important}.p-md-3{padding:1rem 
!important}.p-md-4{padding:1.5rem !important}.p-md-5{padding:3rem !important}.px-md-0{padding-right:0 !important;padding-left:0 !important}.px-md-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-md-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-md-3{padding-right:1rem !important;padding-left:1rem !important}.px-md-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-md-5{padding-right:3rem !important;padding-left:3rem !important}.py-md-0{padding-top:0 !important;padding-bottom:0 !important}.py-md-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-md-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-md-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-md-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-md-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-md-0{padding-top:0 !important}.pt-md-1{padding-top:.25rem !important}.pt-md-2{padding-top:.5rem !important}.pt-md-3{padding-top:1rem !important}.pt-md-4{padding-top:1.5rem !important}.pt-md-5{padding-top:3rem !important}.pe-md-0{padding-right:0 !important}.pe-md-1{padding-right:.25rem !important}.pe-md-2{padding-right:.5rem !important}.pe-md-3{padding-right:1rem !important}.pe-md-4{padding-right:1.5rem !important}.pe-md-5{padding-right:3rem !important}.pb-md-0{padding-bottom:0 !important}.pb-md-1{padding-bottom:.25rem !important}.pb-md-2{padding-bottom:.5rem !important}.pb-md-3{padding-bottom:1rem !important}.pb-md-4{padding-bottom:1.5rem !important}.pb-md-5{padding-bottom:3rem !important}.ps-md-0{padding-left:0 !important}.ps-md-1{padding-left:.25rem !important}.ps-md-2{padding-left:.5rem !important}.ps-md-3{padding-left:1rem !important}.ps-md-4{padding-left:1.5rem !important}.ps-md-5{padding-left:3rem !important}.gap-md-0{gap:0 !important}.gap-md-1{gap:.25rem !important}.gap-md-2{gap:.5rem !important}.gap-md-3{gap:1rem !important}.gap-md-4{gap:1.5rem 
!important}.gap-md-5{gap:3rem !important}.row-gap-md-0{row-gap:0 !important}.row-gap-md-1{row-gap:.25rem !important}.row-gap-md-2{row-gap:.5rem !important}.row-gap-md-3{row-gap:1rem !important}.row-gap-md-4{row-gap:1.5rem !important}.row-gap-md-5{row-gap:3rem !important}.column-gap-md-0{column-gap:0 !important}.column-gap-md-1{column-gap:.25rem !important}.column-gap-md-2{column-gap:.5rem !important}.column-gap-md-3{column-gap:1rem !important}.column-gap-md-4{column-gap:1.5rem !important}.column-gap-md-5{column-gap:3rem !important}.text-md-start{text-align:left !important}.text-md-end{text-align:right !important}.text-md-center{text-align:center !important}}@media(min-width: 992px){.float-lg-start{float:left !important}.float-lg-end{float:right !important}.float-lg-none{float:none !important}.object-fit-lg-contain{object-fit:contain !important}.object-fit-lg-cover{object-fit:cover !important}.object-fit-lg-fill{object-fit:fill !important}.object-fit-lg-scale{object-fit:scale-down !important}.object-fit-lg-none{object-fit:none !important}.d-lg-inline{display:inline !important}.d-lg-inline-block{display:inline-block !important}.d-lg-block{display:block !important}.d-lg-grid{display:grid !important}.d-lg-inline-grid{display:inline-grid !important}.d-lg-table{display:table !important}.d-lg-table-row{display:table-row !important}.d-lg-table-cell{display:table-cell !important}.d-lg-flex{display:flex !important}.d-lg-inline-flex{display:inline-flex !important}.d-lg-none{display:none !important}.flex-lg-fill{flex:1 1 auto !important}.flex-lg-row{flex-direction:row !important}.flex-lg-column{flex-direction:column !important}.flex-lg-row-reverse{flex-direction:row-reverse !important}.flex-lg-column-reverse{flex-direction:column-reverse !important}.flex-lg-grow-0{flex-grow:0 !important}.flex-lg-grow-1{flex-grow:1 !important}.flex-lg-shrink-0{flex-shrink:0 !important}.flex-lg-shrink-1{flex-shrink:1 !important}.flex-lg-wrap{flex-wrap:wrap 
!important}.flex-lg-nowrap{flex-wrap:nowrap !important}.flex-lg-wrap-reverse{flex-wrap:wrap-reverse !important}.justify-content-lg-start{justify-content:flex-start !important}.justify-content-lg-end{justify-content:flex-end !important}.justify-content-lg-center{justify-content:center !important}.justify-content-lg-between{justify-content:space-between !important}.justify-content-lg-around{justify-content:space-around !important}.justify-content-lg-evenly{justify-content:space-evenly !important}.align-items-lg-start{align-items:flex-start !important}.align-items-lg-end{align-items:flex-end !important}.align-items-lg-center{align-items:center !important}.align-items-lg-baseline{align-items:baseline !important}.align-items-lg-stretch{align-items:stretch !important}.align-content-lg-start{align-content:flex-start !important}.align-content-lg-end{align-content:flex-end !important}.align-content-lg-center{align-content:center !important}.align-content-lg-between{align-content:space-between !important}.align-content-lg-around{align-content:space-around !important}.align-content-lg-stretch{align-content:stretch !important}.align-self-lg-auto{align-self:auto !important}.align-self-lg-start{align-self:flex-start !important}.align-self-lg-end{align-self:flex-end !important}.align-self-lg-center{align-self:center !important}.align-self-lg-baseline{align-self:baseline !important}.align-self-lg-stretch{align-self:stretch !important}.order-lg-first{order:-1 !important}.order-lg-0{order:0 !important}.order-lg-1{order:1 !important}.order-lg-2{order:2 !important}.order-lg-3{order:3 !important}.order-lg-4{order:4 !important}.order-lg-5{order:5 !important}.order-lg-last{order:6 !important}.m-lg-0{margin:0 !important}.m-lg-1{margin:.25rem !important}.m-lg-2{margin:.5rem !important}.m-lg-3{margin:1rem !important}.m-lg-4{margin:1.5rem !important}.m-lg-5{margin:3rem !important}.m-lg-auto{margin:auto !important}.mx-lg-0{margin-right:0 !important;margin-left:0 
!important}.mx-lg-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-lg-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-lg-3{margin-right:1rem !important;margin-left:1rem !important}.mx-lg-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-lg-5{margin-right:3rem !important;margin-left:3rem !important}.mx-lg-auto{margin-right:auto !important;margin-left:auto !important}.my-lg-0{margin-top:0 !important;margin-bottom:0 !important}.my-lg-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-lg-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-lg-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-lg-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-lg-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-lg-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-lg-0{margin-top:0 !important}.mt-lg-1{margin-top:.25rem !important}.mt-lg-2{margin-top:.5rem !important}.mt-lg-3{margin-top:1rem !important}.mt-lg-4{margin-top:1.5rem !important}.mt-lg-5{margin-top:3rem !important}.mt-lg-auto{margin-top:auto !important}.me-lg-0{margin-right:0 !important}.me-lg-1{margin-right:.25rem !important}.me-lg-2{margin-right:.5rem !important}.me-lg-3{margin-right:1rem !important}.me-lg-4{margin-right:1.5rem !important}.me-lg-5{margin-right:3rem !important}.me-lg-auto{margin-right:auto !important}.mb-lg-0{margin-bottom:0 !important}.mb-lg-1{margin-bottom:.25rem !important}.mb-lg-2{margin-bottom:.5rem !important}.mb-lg-3{margin-bottom:1rem !important}.mb-lg-4{margin-bottom:1.5rem !important}.mb-lg-5{margin-bottom:3rem !important}.mb-lg-auto{margin-bottom:auto !important}.ms-lg-0{margin-left:0 !important}.ms-lg-1{margin-left:.25rem !important}.ms-lg-2{margin-left:.5rem !important}.ms-lg-3{margin-left:1rem !important}.ms-lg-4{margin-left:1.5rem !important}.ms-lg-5{margin-left:3rem !important}.ms-lg-auto{margin-left:auto !important}.p-lg-0{padding:0 
!important}.p-lg-1{padding:.25rem !important}.p-lg-2{padding:.5rem !important}.p-lg-3{padding:1rem !important}.p-lg-4{padding:1.5rem !important}.p-lg-5{padding:3rem !important}.px-lg-0{padding-right:0 !important;padding-left:0 !important}.px-lg-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-lg-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-lg-3{padding-right:1rem !important;padding-left:1rem !important}.px-lg-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-lg-5{padding-right:3rem !important;padding-left:3rem !important}.py-lg-0{padding-top:0 !important;padding-bottom:0 !important}.py-lg-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-lg-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-lg-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-lg-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-lg-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-lg-0{padding-top:0 !important}.pt-lg-1{padding-top:.25rem !important}.pt-lg-2{padding-top:.5rem !important}.pt-lg-3{padding-top:1rem !important}.pt-lg-4{padding-top:1.5rem !important}.pt-lg-5{padding-top:3rem !important}.pe-lg-0{padding-right:0 !important}.pe-lg-1{padding-right:.25rem !important}.pe-lg-2{padding-right:.5rem !important}.pe-lg-3{padding-right:1rem !important}.pe-lg-4{padding-right:1.5rem !important}.pe-lg-5{padding-right:3rem !important}.pb-lg-0{padding-bottom:0 !important}.pb-lg-1{padding-bottom:.25rem !important}.pb-lg-2{padding-bottom:.5rem !important}.pb-lg-3{padding-bottom:1rem !important}.pb-lg-4{padding-bottom:1.5rem !important}.pb-lg-5{padding-bottom:3rem !important}.ps-lg-0{padding-left:0 !important}.ps-lg-1{padding-left:.25rem !important}.ps-lg-2{padding-left:.5rem !important}.ps-lg-3{padding-left:1rem !important}.ps-lg-4{padding-left:1.5rem !important}.ps-lg-5{padding-left:3rem !important}.gap-lg-0{gap:0 !important}.gap-lg-1{gap:.25rem 
!important}.gap-lg-2{gap:.5rem !important}.gap-lg-3{gap:1rem !important}.gap-lg-4{gap:1.5rem !important}.gap-lg-5{gap:3rem !important}.row-gap-lg-0{row-gap:0 !important}.row-gap-lg-1{row-gap:.25rem !important}.row-gap-lg-2{row-gap:.5rem !important}.row-gap-lg-3{row-gap:1rem !important}.row-gap-lg-4{row-gap:1.5rem !important}.row-gap-lg-5{row-gap:3rem !important}.column-gap-lg-0{column-gap:0 !important}.column-gap-lg-1{column-gap:.25rem !important}.column-gap-lg-2{column-gap:.5rem !important}.column-gap-lg-3{column-gap:1rem !important}.column-gap-lg-4{column-gap:1.5rem !important}.column-gap-lg-5{column-gap:3rem !important}.text-lg-start{text-align:left !important}.text-lg-end{text-align:right !important}.text-lg-center{text-align:center !important}}@media(min-width: 1200px){.float-xl-start{float:left !important}.float-xl-end{float:right !important}.float-xl-none{float:none !important}.object-fit-xl-contain{object-fit:contain !important}.object-fit-xl-cover{object-fit:cover !important}.object-fit-xl-fill{object-fit:fill !important}.object-fit-xl-scale{object-fit:scale-down !important}.object-fit-xl-none{object-fit:none !important}.d-xl-inline{display:inline !important}.d-xl-inline-block{display:inline-block !important}.d-xl-block{display:block !important}.d-xl-grid{display:grid !important}.d-xl-inline-grid{display:inline-grid !important}.d-xl-table{display:table !important}.d-xl-table-row{display:table-row !important}.d-xl-table-cell{display:table-cell !important}.d-xl-flex{display:flex !important}.d-xl-inline-flex{display:inline-flex !important}.d-xl-none{display:none !important}.flex-xl-fill{flex:1 1 auto !important}.flex-xl-row{flex-direction:row !important}.flex-xl-column{flex-direction:column !important}.flex-xl-row-reverse{flex-direction:row-reverse !important}.flex-xl-column-reverse{flex-direction:column-reverse !important}.flex-xl-grow-0{flex-grow:0 !important}.flex-xl-grow-1{flex-grow:1 !important}.flex-xl-shrink-0{flex-shrink:0 
!important}.flex-xl-shrink-1{flex-shrink:1 !important}.flex-xl-wrap{flex-wrap:wrap !important}.flex-xl-nowrap{flex-wrap:nowrap !important}.flex-xl-wrap-reverse{flex-wrap:wrap-reverse !important}.justify-content-xl-start{justify-content:flex-start !important}.justify-content-xl-end{justify-content:flex-end !important}.justify-content-xl-center{justify-content:center !important}.justify-content-xl-between{justify-content:space-between !important}.justify-content-xl-around{justify-content:space-around !important}.justify-content-xl-evenly{justify-content:space-evenly !important}.align-items-xl-start{align-items:flex-start !important}.align-items-xl-end{align-items:flex-end !important}.align-items-xl-center{align-items:center !important}.align-items-xl-baseline{align-items:baseline !important}.align-items-xl-stretch{align-items:stretch !important}.align-content-xl-start{align-content:flex-start !important}.align-content-xl-end{align-content:flex-end !important}.align-content-xl-center{align-content:center !important}.align-content-xl-between{align-content:space-between !important}.align-content-xl-around{align-content:space-around !important}.align-content-xl-stretch{align-content:stretch !important}.align-self-xl-auto{align-self:auto !important}.align-self-xl-start{align-self:flex-start !important}.align-self-xl-end{align-self:flex-end !important}.align-self-xl-center{align-self:center !important}.align-self-xl-baseline{align-self:baseline !important}.align-self-xl-stretch{align-self:stretch !important}.order-xl-first{order:-1 !important}.order-xl-0{order:0 !important}.order-xl-1{order:1 !important}.order-xl-2{order:2 !important}.order-xl-3{order:3 !important}.order-xl-4{order:4 !important}.order-xl-5{order:5 !important}.order-xl-last{order:6 !important}.m-xl-0{margin:0 !important}.m-xl-1{margin:.25rem !important}.m-xl-2{margin:.5rem !important}.m-xl-3{margin:1rem !important}.m-xl-4{margin:1.5rem !important}.m-xl-5{margin:3rem !important}.m-xl-auto{margin:auto 
!important}.mx-xl-0{margin-right:0 !important;margin-left:0 !important}.mx-xl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xl-auto{margin-right:auto !important;margin-left:auto !important}.my-xl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xl-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-xl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xl-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-xl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xl-0{margin-top:0 !important}.mt-xl-1{margin-top:.25rem !important}.mt-xl-2{margin-top:.5rem !important}.mt-xl-3{margin-top:1rem !important}.mt-xl-4{margin-top:1.5rem !important}.mt-xl-5{margin-top:3rem !important}.mt-xl-auto{margin-top:auto !important}.me-xl-0{margin-right:0 !important}.me-xl-1{margin-right:.25rem !important}.me-xl-2{margin-right:.5rem !important}.me-xl-3{margin-right:1rem !important}.me-xl-4{margin-right:1.5rem !important}.me-xl-5{margin-right:3rem !important}.me-xl-auto{margin-right:auto !important}.mb-xl-0{margin-bottom:0 !important}.mb-xl-1{margin-bottom:.25rem !important}.mb-xl-2{margin-bottom:.5rem !important}.mb-xl-3{margin-bottom:1rem !important}.mb-xl-4{margin-bottom:1.5rem !important}.mb-xl-5{margin-bottom:3rem !important}.mb-xl-auto{margin-bottom:auto !important}.ms-xl-0{margin-left:0 !important}.ms-xl-1{margin-left:.25rem !important}.ms-xl-2{margin-left:.5rem !important}.ms-xl-3{margin-left:1rem !important}.ms-xl-4{margin-left:1.5rem !important}.ms-xl-5{margin-left:3rem 
!important}.ms-xl-auto{margin-left:auto !important}.p-xl-0{padding:0 !important}.p-xl-1{padding:.25rem !important}.p-xl-2{padding:.5rem !important}.p-xl-3{padding:1rem !important}.p-xl-4{padding:1.5rem !important}.p-xl-5{padding:3rem !important}.px-xl-0{padding-right:0 !important;padding-left:0 !important}.px-xl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xl-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-xl-0{padding-top:0 !important}.pt-xl-1{padding-top:.25rem !important}.pt-xl-2{padding-top:.5rem !important}.pt-xl-3{padding-top:1rem !important}.pt-xl-4{padding-top:1.5rem !important}.pt-xl-5{padding-top:3rem !important}.pe-xl-0{padding-right:0 !important}.pe-xl-1{padding-right:.25rem !important}.pe-xl-2{padding-right:.5rem !important}.pe-xl-3{padding-right:1rem !important}.pe-xl-4{padding-right:1.5rem !important}.pe-xl-5{padding-right:3rem !important}.pb-xl-0{padding-bottom:0 !important}.pb-xl-1{padding-bottom:.25rem !important}.pb-xl-2{padding-bottom:.5rem !important}.pb-xl-3{padding-bottom:1rem !important}.pb-xl-4{padding-bottom:1.5rem !important}.pb-xl-5{padding-bottom:3rem !important}.ps-xl-0{padding-left:0 !important}.ps-xl-1{padding-left:.25rem !important}.ps-xl-2{padding-left:.5rem !important}.ps-xl-3{padding-left:1rem !important}.ps-xl-4{padding-left:1.5rem !important}.ps-xl-5{padding-left:3rem 
!important}.gap-xl-0{gap:0 !important}.gap-xl-1{gap:.25rem !important}.gap-xl-2{gap:.5rem !important}.gap-xl-3{gap:1rem !important}.gap-xl-4{gap:1.5rem !important}.gap-xl-5{gap:3rem !important}.row-gap-xl-0{row-gap:0 !important}.row-gap-xl-1{row-gap:.25rem !important}.row-gap-xl-2{row-gap:.5rem !important}.row-gap-xl-3{row-gap:1rem !important}.row-gap-xl-4{row-gap:1.5rem !important}.row-gap-xl-5{row-gap:3rem !important}.column-gap-xl-0{column-gap:0 !important}.column-gap-xl-1{column-gap:.25rem !important}.column-gap-xl-2{column-gap:.5rem !important}.column-gap-xl-3{column-gap:1rem !important}.column-gap-xl-4{column-gap:1.5rem !important}.column-gap-xl-5{column-gap:3rem !important}.text-xl-start{text-align:left !important}.text-xl-end{text-align:right !important}.text-xl-center{text-align:center !important}}@media(min-width: 1400px){.float-xxl-start{float:left !important}.float-xxl-end{float:right !important}.float-xxl-none{float:none !important}.object-fit-xxl-contain{object-fit:contain !important}.object-fit-xxl-cover{object-fit:cover !important}.object-fit-xxl-fill{object-fit:fill !important}.object-fit-xxl-scale{object-fit:scale-down !important}.object-fit-xxl-none{object-fit:none !important}.d-xxl-inline{display:inline !important}.d-xxl-inline-block{display:inline-block !important}.d-xxl-block{display:block !important}.d-xxl-grid{display:grid !important}.d-xxl-inline-grid{display:inline-grid !important}.d-xxl-table{display:table !important}.d-xxl-table-row{display:table-row !important}.d-xxl-table-cell{display:table-cell !important}.d-xxl-flex{display:flex !important}.d-xxl-inline-flex{display:inline-flex !important}.d-xxl-none{display:none !important}.flex-xxl-fill{flex:1 1 auto !important}.flex-xxl-row{flex-direction:row !important}.flex-xxl-column{flex-direction:column !important}.flex-xxl-row-reverse{flex-direction:row-reverse !important}.flex-xxl-column-reverse{flex-direction:column-reverse !important}.flex-xxl-grow-0{flex-grow:0 
!important}.flex-xxl-grow-1{flex-grow:1 !important}.flex-xxl-shrink-0{flex-shrink:0 !important}.flex-xxl-shrink-1{flex-shrink:1 !important}.flex-xxl-wrap{flex-wrap:wrap !important}.flex-xxl-nowrap{flex-wrap:nowrap !important}.flex-xxl-wrap-reverse{flex-wrap:wrap-reverse !important}.justify-content-xxl-start{justify-content:flex-start !important}.justify-content-xxl-end{justify-content:flex-end !important}.justify-content-xxl-center{justify-content:center !important}.justify-content-xxl-between{justify-content:space-between !important}.justify-content-xxl-around{justify-content:space-around !important}.justify-content-xxl-evenly{justify-content:space-evenly !important}.align-items-xxl-start{align-items:flex-start !important}.align-items-xxl-end{align-items:flex-end !important}.align-items-xxl-center{align-items:center !important}.align-items-xxl-baseline{align-items:baseline !important}.align-items-xxl-stretch{align-items:stretch !important}.align-content-xxl-start{align-content:flex-start !important}.align-content-xxl-end{align-content:flex-end !important}.align-content-xxl-center{align-content:center !important}.align-content-xxl-between{align-content:space-between !important}.align-content-xxl-around{align-content:space-around !important}.align-content-xxl-stretch{align-content:stretch !important}.align-self-xxl-auto{align-self:auto !important}.align-self-xxl-start{align-self:flex-start !important}.align-self-xxl-end{align-self:flex-end !important}.align-self-xxl-center{align-self:center !important}.align-self-xxl-baseline{align-self:baseline !important}.align-self-xxl-stretch{align-self:stretch !important}.order-xxl-first{order:-1 !important}.order-xxl-0{order:0 !important}.order-xxl-1{order:1 !important}.order-xxl-2{order:2 !important}.order-xxl-3{order:3 !important}.order-xxl-4{order:4 !important}.order-xxl-5{order:5 !important}.order-xxl-last{order:6 !important}.m-xxl-0{margin:0 !important}.m-xxl-1{margin:.25rem !important}.m-xxl-2{margin:.5rem 
!important}.m-xxl-3{margin:1rem !important}.m-xxl-4{margin:1.5rem !important}.m-xxl-5{margin:3rem !important}.m-xxl-auto{margin:auto !important}.mx-xxl-0{margin-right:0 !important;margin-left:0 !important}.mx-xxl-1{margin-right:.25rem !important;margin-left:.25rem !important}.mx-xxl-2{margin-right:.5rem !important;margin-left:.5rem !important}.mx-xxl-3{margin-right:1rem !important;margin-left:1rem !important}.mx-xxl-4{margin-right:1.5rem !important;margin-left:1.5rem !important}.mx-xxl-5{margin-right:3rem !important;margin-left:3rem !important}.mx-xxl-auto{margin-right:auto !important;margin-left:auto !important}.my-xxl-0{margin-top:0 !important;margin-bottom:0 !important}.my-xxl-1{margin-top:.25rem !important;margin-bottom:.25rem !important}.my-xxl-2{margin-top:.5rem !important;margin-bottom:.5rem !important}.my-xxl-3{margin-top:1rem !important;margin-bottom:1rem !important}.my-xxl-4{margin-top:1.5rem !important;margin-bottom:1.5rem !important}.my-xxl-5{margin-top:3rem !important;margin-bottom:3rem !important}.my-xxl-auto{margin-top:auto !important;margin-bottom:auto !important}.mt-xxl-0{margin-top:0 !important}.mt-xxl-1{margin-top:.25rem !important}.mt-xxl-2{margin-top:.5rem !important}.mt-xxl-3{margin-top:1rem !important}.mt-xxl-4{margin-top:1.5rem !important}.mt-xxl-5{margin-top:3rem !important}.mt-xxl-auto{margin-top:auto !important}.me-xxl-0{margin-right:0 !important}.me-xxl-1{margin-right:.25rem !important}.me-xxl-2{margin-right:.5rem !important}.me-xxl-3{margin-right:1rem !important}.me-xxl-4{margin-right:1.5rem !important}.me-xxl-5{margin-right:3rem !important}.me-xxl-auto{margin-right:auto !important}.mb-xxl-0{margin-bottom:0 !important}.mb-xxl-1{margin-bottom:.25rem !important}.mb-xxl-2{margin-bottom:.5rem !important}.mb-xxl-3{margin-bottom:1rem !important}.mb-xxl-4{margin-bottom:1.5rem !important}.mb-xxl-5{margin-bottom:3rem !important}.mb-xxl-auto{margin-bottom:auto !important}.ms-xxl-0{margin-left:0 !important}.ms-xxl-1{margin-left:.25rem 
!important}.ms-xxl-2{margin-left:.5rem !important}.ms-xxl-3{margin-left:1rem !important}.ms-xxl-4{margin-left:1.5rem !important}.ms-xxl-5{margin-left:3rem !important}.ms-xxl-auto{margin-left:auto !important}.p-xxl-0{padding:0 !important}.p-xxl-1{padding:.25rem !important}.p-xxl-2{padding:.5rem !important}.p-xxl-3{padding:1rem !important}.p-xxl-4{padding:1.5rem !important}.p-xxl-5{padding:3rem !important}.px-xxl-0{padding-right:0 !important;padding-left:0 !important}.px-xxl-1{padding-right:.25rem !important;padding-left:.25rem !important}.px-xxl-2{padding-right:.5rem !important;padding-left:.5rem !important}.px-xxl-3{padding-right:1rem !important;padding-left:1rem !important}.px-xxl-4{padding-right:1.5rem !important;padding-left:1.5rem !important}.px-xxl-5{padding-right:3rem !important;padding-left:3rem !important}.py-xxl-0{padding-top:0 !important;padding-bottom:0 !important}.py-xxl-1{padding-top:.25rem !important;padding-bottom:.25rem !important}.py-xxl-2{padding-top:.5rem !important;padding-bottom:.5rem !important}.py-xxl-3{padding-top:1rem !important;padding-bottom:1rem !important}.py-xxl-4{padding-top:1.5rem !important;padding-bottom:1.5rem !important}.py-xxl-5{padding-top:3rem !important;padding-bottom:3rem !important}.pt-xxl-0{padding-top:0 !important}.pt-xxl-1{padding-top:.25rem !important}.pt-xxl-2{padding-top:.5rem !important}.pt-xxl-3{padding-top:1rem !important}.pt-xxl-4{padding-top:1.5rem !important}.pt-xxl-5{padding-top:3rem !important}.pe-xxl-0{padding-right:0 !important}.pe-xxl-1{padding-right:.25rem !important}.pe-xxl-2{padding-right:.5rem !important}.pe-xxl-3{padding-right:1rem !important}.pe-xxl-4{padding-right:1.5rem !important}.pe-xxl-5{padding-right:3rem !important}.pb-xxl-0{padding-bottom:0 !important}.pb-xxl-1{padding-bottom:.25rem !important}.pb-xxl-2{padding-bottom:.5rem !important}.pb-xxl-3{padding-bottom:1rem !important}.pb-xxl-4{padding-bottom:1.5rem !important}.pb-xxl-5{padding-bottom:3rem !important}.ps-xxl-0{padding-left:0 
!important}.ps-xxl-1{padding-left:.25rem !important}.ps-xxl-2{padding-left:.5rem !important}.ps-xxl-3{padding-left:1rem !important}.ps-xxl-4{padding-left:1.5rem !important}.ps-xxl-5{padding-left:3rem !important}.gap-xxl-0{gap:0 !important}.gap-xxl-1{gap:.25rem !important}.gap-xxl-2{gap:.5rem !important}.gap-xxl-3{gap:1rem !important}.gap-xxl-4{gap:1.5rem !important}.gap-xxl-5{gap:3rem !important}.row-gap-xxl-0{row-gap:0 !important}.row-gap-xxl-1{row-gap:.25rem !important}.row-gap-xxl-2{row-gap:.5rem !important}.row-gap-xxl-3{row-gap:1rem !important}.row-gap-xxl-4{row-gap:1.5rem !important}.row-gap-xxl-5{row-gap:3rem !important}.column-gap-xxl-0{column-gap:0 !important}.column-gap-xxl-1{column-gap:.25rem !important}.column-gap-xxl-2{column-gap:.5rem !important}.column-gap-xxl-3{column-gap:1rem !important}.column-gap-xxl-4{column-gap:1.5rem !important}.column-gap-xxl-5{column-gap:3rem !important}.text-xxl-start{text-align:left !important}.text-xxl-end{text-align:right !important}.text-xxl-center{text-align:center !important}}.bg-default{color:#fff}.bg-primary{color:#fff}.bg-secondary{color:#fff}.bg-success{color:#fff}.bg-info{color:#fff}.bg-warning{color:#fff}.bg-danger{color:#fff}.bg-light{color:#000}.bg-dark{color:#fff}@media(min-width: 1200px){.fs-1{font-size:2rem !important}.fs-2{font-size:1.65rem !important}.fs-3{font-size:1.45rem !important}}@media print{.d-print-inline{display:inline !important}.d-print-inline-block{display:inline-block !important}.d-print-block{display:block !important}.d-print-grid{display:grid !important}.d-print-inline-grid{display:inline-grid !important}.d-print-table{display:table !important}.d-print-table-row{display:table-row !important}.d-print-table-cell{display:table-cell !important}.d-print-flex{display:flex !important}.d-print-inline-flex{display:inline-flex !important}.d-print-none{display:none !important}}:root{--bslib-spacer: 1rem;--bslib-mb-spacer: var(--bslib-spacer, 
1rem)}.bslib-mb-spacing{margin-bottom:var(--bslib-mb-spacer)}.bslib-gap-spacing{gap:var(--bslib-mb-spacer)}.bslib-gap-spacing>.bslib-mb-spacing,.bslib-gap-spacing>.form-group,.bslib-gap-spacing>p,.bslib-gap-spacing>pre{margin-bottom:0}.html-fill-container>.html-fill-item.bslib-mb-spacing{margin-bottom:0}.tab-content>.tab-pane.html-fill-container{display:none}.tab-content>.active.html-fill-container{display:flex}.tab-content.html-fill-container{padding:0}.bg-blue{--bslib-color-bg: #2780e3;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-blue{--bslib-color-fg: #2780e3;color:var(--bslib-color-fg)}.bg-indigo{--bslib-color-bg: #6610f2;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-indigo{--bslib-color-fg: #6610f2;color:var(--bslib-color-fg)}.bg-purple{--bslib-color-bg: #613d7c;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-purple{--bslib-color-fg: #613d7c;color:var(--bslib-color-fg)}.bg-pink{--bslib-color-bg: #e83e8c;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-pink{--bslib-color-fg: #e83e8c;color:var(--bslib-color-fg)}.bg-red{--bslib-color-bg: #ff0039;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-red{--bslib-color-fg: #ff0039;color:var(--bslib-color-fg)}.bg-orange{--bslib-color-bg: #f0ad4e;--bslib-color-fg: #000;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-orange{--bslib-color-fg: #f0ad4e;color:var(--bslib-color-fg)}.bg-yellow{--bslib-color-bg: #ff7518;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-yellow{--bslib-color-fg: #ff7518;color:var(--bslib-color-fg)}.bg-green{--bslib-color-bg: #3fb618;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-green{--bslib-color-fg: 
#3fb618;color:var(--bslib-color-fg)}.bg-teal{--bslib-color-bg: #20c997;--bslib-color-fg: #000;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-teal{--bslib-color-fg: #20c997;color:var(--bslib-color-fg)}.bg-cyan{--bslib-color-bg: #9954bb;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-cyan{--bslib-color-fg: #9954bb;color:var(--bslib-color-fg)}.text-default{--bslib-color-fg: #343a40}.bg-default{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.text-primary{--bslib-color-fg: #5c2983}.bg-primary{--bslib-color-bg: #5c2983;--bslib-color-fg: #fff}.text-secondary{--bslib-color-fg: #343a40}.bg-secondary{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.text-success{--bslib-color-fg: #3fb618}.bg-success{--bslib-color-bg: #3fb618;--bslib-color-fg: #fff}.text-info{--bslib-color-fg: #9954bb}.bg-info{--bslib-color-bg: #9954bb;--bslib-color-fg: #fff}.text-warning{--bslib-color-fg: #ff7518}.bg-warning{--bslib-color-bg: #ff7518;--bslib-color-fg: #fff}.text-danger{--bslib-color-fg: #ff0039}.bg-danger{--bslib-color-bg: #ff0039;--bslib-color-fg: #fff}.text-light{--bslib-color-fg: #f8f9fa}.bg-light{--bslib-color-bg: #f8f9fa;--bslib-color-fg: #000}.text-dark{--bslib-color-fg: #343a40}.bg-dark{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.bg-gradient-blue-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #4053e9;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #4053e9;color:#fff}.bg-gradient-blue-purple{--bslib-color-fg: #fff;--bslib-color-bg: #3e65ba;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #3e65ba;color:#fff}.bg-gradient-blue-pink{--bslib-color-fg: #fff;--bslib-color-bg: #7466c0;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) 
#7466c0;color:#fff}.bg-gradient-blue-red{--bslib-color-fg: #fff;--bslib-color-bg: #7d4d9f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #7d4d9f;color:#fff}.bg-gradient-blue-orange{--bslib-color-fg: #fff;--bslib-color-bg: #7792a7;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #7792a7;color:#fff}.bg-gradient-blue-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #7d7c92;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #7d7c92;color:#fff}.bg-gradient-blue-green{--bslib-color-fg: #fff;--bslib-color-bg: #319692;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #319692;color:#fff}.bg-gradient-blue-teal{--bslib-color-fg: #fff;--bslib-color-bg: #249dc5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #249dc5;color:#fff}.bg-gradient-blue-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #556ed3;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #556ed3;color:#fff}.bg-gradient-indigo-blue{--bslib-color-fg: #fff;--bslib-color-bg: #4d3dec;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #4d3dec;color:#fff}.bg-gradient-indigo-purple{--bslib-color-fg: #fff;--bslib-color-bg: #6422c3;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #6422c3;color:#fff}.bg-gradient-indigo-pink{--bslib-color-fg: #fff;--bslib-color-bg: #9a22c9;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 
var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #9a22c9;color:#fff}.bg-gradient-indigo-red{--bslib-color-fg: #fff;--bslib-color-bg: #a30aa8;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #a30aa8;color:#fff}.bg-gradient-indigo-orange{--bslib-color-fg: #fff;--bslib-color-bg: #9d4fb0;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #9d4fb0;color:#fff}.bg-gradient-indigo-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #a3389b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #a3389b;color:#fff}.bg-gradient-indigo-green{--bslib-color-fg: #fff;--bslib-color-bg: #56529b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #56529b;color:#fff}.bg-gradient-indigo-teal{--bslib-color-fg: #fff;--bslib-color-bg: #4a5ace;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #4a5ace;color:#fff}.bg-gradient-indigo-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #7a2bdc;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #7a2bdc;color:#fff}.bg-gradient-purple-blue{--bslib-color-fg: #fff;--bslib-color-bg: #4a58a5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #4a58a5;color:#fff}.bg-gradient-purple-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #632bab;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #632bab;color:#fff}.bg-gradient-purple-pink{--bslib-color-fg: #fff;--bslib-color-bg: 
#973d82;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #973d82;color:#fff}.bg-gradient-purple-red{--bslib-color-fg: #fff;--bslib-color-bg: #a02561;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #a02561;color:#fff}.bg-gradient-purple-orange{--bslib-color-fg: #fff;--bslib-color-bg: #9a6a6a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #9a6a6a;color:#fff}.bg-gradient-purple-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #a05354;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #a05354;color:#fff}.bg-gradient-purple-green{--bslib-color-fg: #fff;--bslib-color-bg: #536d54;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #536d54;color:#fff}.bg-gradient-purple-teal{--bslib-color-fg: #fff;--bslib-color-bg: #477587;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #477587;color:#fff}.bg-gradient-purple-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #774695;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #774695;color:#fff}.bg-gradient-pink-blue{--bslib-color-fg: #fff;--bslib-color-bg: #9b58af;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #9b58af;color:#fff}.bg-gradient-pink-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #b42cb5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) 
#b42cb5;color:#fff}.bg-gradient-pink-purple{--bslib-color-fg: #fff;--bslib-color-bg: #b23e86;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #b23e86;color:#fff}.bg-gradient-pink-red{--bslib-color-fg: #fff;--bslib-color-bg: #f1256b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #f1256b;color:#fff}.bg-gradient-pink-orange{--bslib-color-fg: #fff;--bslib-color-bg: #eb6a73;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #eb6a73;color:#fff}.bg-gradient-pink-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #f1545e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #f1545e;color:#fff}.bg-gradient-pink-green{--bslib-color-fg: #fff;--bslib-color-bg: #a46e5e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #a46e5e;color:#fff}.bg-gradient-pink-teal{--bslib-color-fg: #fff;--bslib-color-bg: #987690;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #987690;color:#fff}.bg-gradient-pink-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #c8479f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #c8479f;color:#fff}.bg-gradient-red-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a9337d;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #a9337d;color:#fff}.bg-gradient-red-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #c20683;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 
var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #c20683;color:#fff}.bg-gradient-red-purple{--bslib-color-fg: #fff;--bslib-color-bg: #c01854;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #c01854;color:#fff}.bg-gradient-red-pink{--bslib-color-fg: #fff;--bslib-color-bg: #f6195a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #f6195a;color:#fff}.bg-gradient-red-orange{--bslib-color-fg: #fff;--bslib-color-bg: #f94541;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #f94541;color:#fff}.bg-gradient-red-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #ff2f2c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #ff2f2c;color:#fff}.bg-gradient-red-green{--bslib-color-fg: #fff;--bslib-color-bg: #b2492c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #b2492c;color:#fff}.bg-gradient-red-teal{--bslib-color-fg: #fff;--bslib-color-bg: #a6505f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #a6505f;color:#fff}.bg-gradient-red-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #d6226d;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #d6226d;color:#fff}.bg-gradient-orange-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a09b8a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #a09b8a;color:#fff}.bg-gradient-orange-indigo{--bslib-color-fg: #fff;--bslib-color-bg: 
#b96e90;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #b96e90;color:#fff}.bg-gradient-orange-purple{--bslib-color-fg: #fff;--bslib-color-bg: #b78060;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #b78060;color:#fff}.bg-gradient-orange-pink{--bslib-color-fg: #fff;--bslib-color-bg: #ed8167;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #ed8167;color:#fff}.bg-gradient-orange-red{--bslib-color-fg: #fff;--bslib-color-bg: #f66846;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #f66846;color:#fff}.bg-gradient-orange-yellow{--bslib-color-fg: #000;--bslib-color-bg: #f69738;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #f69738;color:#000}.bg-gradient-orange-green{--bslib-color-fg: #000;--bslib-color-bg: #a9b138;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #a9b138;color:#000}.bg-gradient-orange-teal{--bslib-color-fg: #000;--bslib-color-bg: #9db86b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #9db86b;color:#000}.bg-gradient-orange-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #cd897a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #cd897a;color:#fff}.bg-gradient-yellow-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a97969;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) 
#a97969;color:#fff}.bg-gradient-yellow-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #c24d6f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #c24d6f;color:#fff}.bg-gradient-yellow-purple{--bslib-color-fg: #fff;--bslib-color-bg: #c05f40;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #c05f40;color:#fff}.bg-gradient-yellow-pink{--bslib-color-fg: #fff;--bslib-color-bg: #f65f46;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #f65f46;color:#fff}.bg-gradient-yellow-red{--bslib-color-fg: #fff;--bslib-color-bg: #ff4625;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #ff4625;color:#fff}.bg-gradient-yellow-orange{--bslib-color-fg: #000;--bslib-color-bg: #f98b2e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #f98b2e;color:#000}.bg-gradient-yellow-green{--bslib-color-fg: #fff;--bslib-color-bg: #b28f18;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #b28f18;color:#fff}.bg-gradient-yellow-teal{--bslib-color-fg: #fff;--bslib-color-bg: #a6974b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #a6974b;color:#fff}.bg-gradient-yellow-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #d66859;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #d66859;color:#fff}.bg-gradient-green-blue{--bslib-color-fg: #fff;--bslib-color-bg: #35a069;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 
var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #35a069;color:#fff}.bg-gradient-green-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #4f746f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #4f746f;color:#fff}.bg-gradient-green-purple{--bslib-color-fg: #fff;--bslib-color-bg: #4d8640;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #4d8640;color:#fff}.bg-gradient-green-pink{--bslib-color-fg: #fff;--bslib-color-bg: #838646;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #838646;color:#fff}.bg-gradient-green-red{--bslib-color-fg: #fff;--bslib-color-bg: #8c6d25;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #8c6d25;color:#fff}.bg-gradient-green-orange{--bslib-color-fg: #000;--bslib-color-bg: #86b22e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #86b22e;color:#000}.bg-gradient-green-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #8c9c18;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #8c9c18;color:#fff}.bg-gradient-green-teal{--bslib-color-fg: #000;--bslib-color-bg: #33be4b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #33be4b;color:#000}.bg-gradient-green-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #638f59;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #638f59;color:#fff}.bg-gradient-teal-blue{--bslib-color-fg: #fff;--bslib-color-bg: 
#23acb5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #23acb5;color:#fff}.bg-gradient-teal-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #3c7fbb;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #3c7fbb;color:#fff}.bg-gradient-teal-purple{--bslib-color-fg: #fff;--bslib-color-bg: #3a918c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #3a918c;color:#fff}.bg-gradient-teal-pink{--bslib-color-fg: #fff;--bslib-color-bg: #709193;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #709193;color:#fff}.bg-gradient-teal-red{--bslib-color-fg: #fff;--bslib-color-bg: #797971;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #797971;color:#fff}.bg-gradient-teal-orange{--bslib-color-fg: #000;--bslib-color-bg: #73be7a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #73be7a;color:#000}.bg-gradient-teal-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #79a764;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #79a764;color:#fff}.bg-gradient-teal-green{--bslib-color-fg: #000;--bslib-color-bg: #2cc164;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #2cc164;color:#000}.bg-gradient-teal-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #509aa5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) 
#509aa5;color:#fff}.bg-gradient-cyan-blue{--bslib-color-fg: #fff;--bslib-color-bg: #6b66cb;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #6b66cb;color:#fff}.bg-gradient-cyan-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #8539d1;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #8539d1;color:#fff}.bg-gradient-cyan-purple{--bslib-color-fg: #fff;--bslib-color-bg: #834ba2;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #834ba2;color:#fff}.bg-gradient-cyan-pink{--bslib-color-fg: #fff;--bslib-color-bg: #b94ba8;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #b94ba8;color:#fff}.bg-gradient-cyan-red{--bslib-color-fg: #fff;--bslib-color-bg: #c23287;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #c23287;color:#fff}.bg-gradient-cyan-orange{--bslib-color-fg: #fff;--bslib-color-bg: #bc788f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #bc788f;color:#fff}.bg-gradient-cyan-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #c2617a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #c2617a;color:#fff}.bg-gradient-cyan-green{--bslib-color-fg: #fff;--bslib-color-bg: #757b7a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #757b7a;color:#fff}.bg-gradient-cyan-teal{--bslib-color-fg: #fff;--bslib-color-bg: #6983ad;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb 
var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #6983ad;color:#fff}.tab-content>.tab-pane.html-fill-container{display:none}.tab-content>.active.html-fill-container{display:flex}.tab-content.html-fill-container{padding:0}.bg-blue{--bslib-color-bg: #2780e3;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-blue{--bslib-color-fg: #2780e3;color:var(--bslib-color-fg)}.bg-indigo{--bslib-color-bg: #6610f2;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-indigo{--bslib-color-fg: #6610f2;color:var(--bslib-color-fg)}.bg-purple{--bslib-color-bg: #613d7c;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-purple{--bslib-color-fg: #613d7c;color:var(--bslib-color-fg)}.bg-pink{--bslib-color-bg: #e83e8c;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-pink{--bslib-color-fg: #e83e8c;color:var(--bslib-color-fg)}.bg-red{--bslib-color-bg: #ff0039;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-red{--bslib-color-fg: #ff0039;color:var(--bslib-color-fg)}.bg-orange{--bslib-color-bg: #f0ad4e;--bslib-color-fg: #000;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-orange{--bslib-color-fg: #f0ad4e;color:var(--bslib-color-fg)}.bg-yellow{--bslib-color-bg: #ff7518;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-yellow{--bslib-color-fg: #ff7518;color:var(--bslib-color-fg)}.bg-green{--bslib-color-bg: #3fb618;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-green{--bslib-color-fg: #3fb618;color:var(--bslib-color-fg)}.bg-teal{--bslib-color-bg: #20c997;--bslib-color-fg: #000;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-teal{--bslib-color-fg: #20c997;color:var(--bslib-color-fg)}.bg-cyan{--bslib-color-bg: 
#9954bb;--bslib-color-fg: #fff;background-color:var(--bslib-color-bg);color:var(--bslib-color-fg)}.text-cyan{--bslib-color-fg: #9954bb;color:var(--bslib-color-fg)}.text-default{--bslib-color-fg: #343a40}.bg-default{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.text-primary{--bslib-color-fg: #5c2983}.bg-primary{--bslib-color-bg: #5c2983;--bslib-color-fg: #fff}.text-secondary{--bslib-color-fg: #343a40}.bg-secondary{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.text-success{--bslib-color-fg: #3fb618}.bg-success{--bslib-color-bg: #3fb618;--bslib-color-fg: #fff}.text-info{--bslib-color-fg: #9954bb}.bg-info{--bslib-color-bg: #9954bb;--bslib-color-fg: #fff}.text-warning{--bslib-color-fg: #ff7518}.bg-warning{--bslib-color-bg: #ff7518;--bslib-color-fg: #fff}.text-danger{--bslib-color-fg: #ff0039}.bg-danger{--bslib-color-bg: #ff0039;--bslib-color-fg: #fff}.text-light{--bslib-color-fg: #f8f9fa}.bg-light{--bslib-color-bg: #f8f9fa;--bslib-color-fg: #000}.text-dark{--bslib-color-fg: #343a40}.bg-dark{--bslib-color-bg: #343a40;--bslib-color-fg: #fff}.bg-gradient-blue-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #4053e9;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #4053e9;color:#fff}.bg-gradient-blue-purple{--bslib-color-fg: #fff;--bslib-color-bg: #3e65ba;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #3e65ba;color:#fff}.bg-gradient-blue-pink{--bslib-color-fg: #fff;--bslib-color-bg: #7466c0;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #7466c0;color:#fff}.bg-gradient-blue-red{--bslib-color-fg: #fff;--bslib-color-bg: #7d4d9f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) 
#7d4d9f;color:#fff}.bg-gradient-blue-orange{--bslib-color-fg: #fff;--bslib-color-bg: #7792a7;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #7792a7;color:#fff}.bg-gradient-blue-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #7d7c92;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #7d7c92;color:#fff}.bg-gradient-blue-green{--bslib-color-fg: #fff;--bslib-color-bg: #319692;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #319692;color:#fff}.bg-gradient-blue-teal{--bslib-color-fg: #fff;--bslib-color-bg: #249dc5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #249dc5;color:#fff}.bg-gradient-blue-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #556ed3;background:linear-gradient(var(--bg-gradient-deg, 140deg), #2780e3 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #556ed3;color:#fff}.bg-gradient-indigo-blue{--bslib-color-fg: #fff;--bslib-color-bg: #4d3dec;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #4d3dec;color:#fff}.bg-gradient-indigo-purple{--bslib-color-fg: #fff;--bslib-color-bg: #6422c3;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #6422c3;color:#fff}.bg-gradient-indigo-pink{--bslib-color-fg: #fff;--bslib-color-bg: #9a22c9;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #9a22c9;color:#fff}.bg-gradient-indigo-red{--bslib-color-fg: #fff;--bslib-color-bg: #a30aa8;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 
var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #a30aa8;color:#fff}.bg-gradient-indigo-orange{--bslib-color-fg: #fff;--bslib-color-bg: #9d4fb0;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #9d4fb0;color:#fff}.bg-gradient-indigo-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #a3389b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #a3389b;color:#fff}.bg-gradient-indigo-green{--bslib-color-fg: #fff;--bslib-color-bg: #56529b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #56529b;color:#fff}.bg-gradient-indigo-teal{--bslib-color-fg: #fff;--bslib-color-bg: #4a5ace;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #4a5ace;color:#fff}.bg-gradient-indigo-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #7a2bdc;background:linear-gradient(var(--bg-gradient-deg, 140deg), #6610f2 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #7a2bdc;color:#fff}.bg-gradient-purple-blue{--bslib-color-fg: #fff;--bslib-color-bg: #4a58a5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #4a58a5;color:#fff}.bg-gradient-purple-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #632bab;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #632bab;color:#fff}.bg-gradient-purple-pink{--bslib-color-fg: #fff;--bslib-color-bg: #973d82;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #973d82;color:#fff}.bg-gradient-purple-red{--bslib-color-fg: #fff;--bslib-color-bg: 
#a02561;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #a02561;color:#fff}.bg-gradient-purple-orange{--bslib-color-fg: #fff;--bslib-color-bg: #9a6a6a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #9a6a6a;color:#fff}.bg-gradient-purple-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #a05354;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #a05354;color:#fff}.bg-gradient-purple-green{--bslib-color-fg: #fff;--bslib-color-bg: #536d54;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #536d54;color:#fff}.bg-gradient-purple-teal{--bslib-color-fg: #fff;--bslib-color-bg: #477587;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #477587;color:#fff}.bg-gradient-purple-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #774695;background:linear-gradient(var(--bg-gradient-deg, 140deg), #613d7c var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #774695;color:#fff}.bg-gradient-pink-blue{--bslib-color-fg: #fff;--bslib-color-bg: #9b58af;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #9b58af;color:#fff}.bg-gradient-pink-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #b42cb5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #b42cb5;color:#fff}.bg-gradient-pink-purple{--bslib-color-fg: #fff;--bslib-color-bg: #b23e86;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) 
#b23e86;color:#fff}.bg-gradient-pink-red{--bslib-color-fg: #fff;--bslib-color-bg: #f1256b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #f1256b;color:#fff}.bg-gradient-pink-orange{--bslib-color-fg: #fff;--bslib-color-bg: #eb6a73;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #eb6a73;color:#fff}.bg-gradient-pink-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #f1545e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #f1545e;color:#fff}.bg-gradient-pink-green{--bslib-color-fg: #fff;--bslib-color-bg: #a46e5e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #a46e5e;color:#fff}.bg-gradient-pink-teal{--bslib-color-fg: #fff;--bslib-color-bg: #987690;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #987690;color:#fff}.bg-gradient-pink-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #c8479f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #e83e8c var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #c8479f;color:#fff}.bg-gradient-red-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a9337d;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #a9337d;color:#fff}.bg-gradient-red-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #c20683;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #c20683;color:#fff}.bg-gradient-red-purple{--bslib-color-fg: #fff;--bslib-color-bg: #c01854;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 
var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #c01854;color:#fff}.bg-gradient-red-pink{--bslib-color-fg: #fff;--bslib-color-bg: #f6195a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #f6195a;color:#fff}.bg-gradient-red-orange{--bslib-color-fg: #fff;--bslib-color-bg: #f94541;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #f94541;color:#fff}.bg-gradient-red-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #ff2f2c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #ff2f2c;color:#fff}.bg-gradient-red-green{--bslib-color-fg: #fff;--bslib-color-bg: #b2492c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #b2492c;color:#fff}.bg-gradient-red-teal{--bslib-color-fg: #fff;--bslib-color-bg: #a6505f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #a6505f;color:#fff}.bg-gradient-red-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #d6226d;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff0039 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #d6226d;color:#fff}.bg-gradient-orange-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a09b8a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #a09b8a;color:#fff}.bg-gradient-orange-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #b96e90;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #b96e90;color:#fff}.bg-gradient-orange-purple{--bslib-color-fg: #fff;--bslib-color-bg: 
#b78060;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #b78060;color:#fff}.bg-gradient-orange-pink{--bslib-color-fg: #fff;--bslib-color-bg: #ed8167;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #ed8167;color:#fff}.bg-gradient-orange-red{--bslib-color-fg: #fff;--bslib-color-bg: #f66846;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #f66846;color:#fff}.bg-gradient-orange-yellow{--bslib-color-fg: #000;--bslib-color-bg: #f69738;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #f69738;color:#000}.bg-gradient-orange-green{--bslib-color-fg: #000;--bslib-color-bg: #a9b138;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #a9b138;color:#000}.bg-gradient-orange-teal{--bslib-color-fg: #000;--bslib-color-bg: #9db86b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #9db86b;color:#000}.bg-gradient-orange-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #cd897a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #f0ad4e var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #cd897a;color:#fff}.bg-gradient-yellow-blue{--bslib-color-fg: #fff;--bslib-color-bg: #a97969;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #a97969;color:#fff}.bg-gradient-yellow-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #c24d6f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) 
#c24d6f;color:#fff}.bg-gradient-yellow-purple{--bslib-color-fg: #fff;--bslib-color-bg: #c05f40;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #c05f40;color:#fff}.bg-gradient-yellow-pink{--bslib-color-fg: #fff;--bslib-color-bg: #f65f46;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #f65f46;color:#fff}.bg-gradient-yellow-red{--bslib-color-fg: #fff;--bslib-color-bg: #ff4625;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #ff4625;color:#fff}.bg-gradient-yellow-orange{--bslib-color-fg: #000;--bslib-color-bg: #f98b2e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #f98b2e;color:#000}.bg-gradient-yellow-green{--bslib-color-fg: #fff;--bslib-color-bg: #b28f18;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #b28f18;color:#fff}.bg-gradient-yellow-teal{--bslib-color-fg: #fff;--bslib-color-bg: #a6974b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #a6974b;color:#fff}.bg-gradient-yellow-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #d66859;background:linear-gradient(var(--bg-gradient-deg, 140deg), #ff7518 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #d66859;color:#fff}.bg-gradient-green-blue{--bslib-color-fg: #fff;--bslib-color-bg: #35a069;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #35a069;color:#fff}.bg-gradient-green-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #4f746f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 
var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #4f746f;color:#fff}.bg-gradient-green-purple{--bslib-color-fg: #fff;--bslib-color-bg: #4d8640;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #4d8640;color:#fff}.bg-gradient-green-pink{--bslib-color-fg: #fff;--bslib-color-bg: #838646;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #838646;color:#fff}.bg-gradient-green-red{--bslib-color-fg: #fff;--bslib-color-bg: #8c6d25;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #8c6d25;color:#fff}.bg-gradient-green-orange{--bslib-color-fg: #000;--bslib-color-bg: #86b22e;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #86b22e;color:#000}.bg-gradient-green-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #8c9c18;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #8c9c18;color:#fff}.bg-gradient-green-teal{--bslib-color-fg: #000;--bslib-color-bg: #33be4b;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #33be4b;color:#000}.bg-gradient-green-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #638f59;background:linear-gradient(var(--bg-gradient-deg, 140deg), #3fb618 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #638f59;color:#fff}.bg-gradient-teal-blue{--bslib-color-fg: #fff;--bslib-color-bg: #23acb5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) #23acb5;color:#fff}.bg-gradient-teal-indigo{--bslib-color-fg: #fff;--bslib-color-bg: 
#3c7fbb;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #3c7fbb;color:#fff}.bg-gradient-teal-purple{--bslib-color-fg: #fff;--bslib-color-bg: #3a918c;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #3a918c;color:#fff}.bg-gradient-teal-pink{--bslib-color-fg: #fff;--bslib-color-bg: #709193;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #709193;color:#fff}.bg-gradient-teal-red{--bslib-color-fg: #fff;--bslib-color-bg: #797971;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #797971;color:#fff}.bg-gradient-teal-orange{--bslib-color-fg: #000;--bslib-color-bg: #73be7a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #73be7a;color:#000}.bg-gradient-teal-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #79a764;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #79a764;color:#fff}.bg-gradient-teal-green{--bslib-color-fg: #000;--bslib-color-bg: #2cc164;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #2cc164;color:#000}.bg-gradient-teal-cyan{--bslib-color-fg: #fff;--bslib-color-bg: #509aa5;background:linear-gradient(var(--bg-gradient-deg, 140deg), #20c997 var(--bg-gradient-start, 36%), #9954bb var(--bg-gradient-end, 180%)) #509aa5;color:#fff}.bg-gradient-cyan-blue{--bslib-color-fg: #fff;--bslib-color-bg: #6b66cb;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #2780e3 var(--bg-gradient-end, 180%)) 
#6b66cb;color:#fff}.bg-gradient-cyan-indigo{--bslib-color-fg: #fff;--bslib-color-bg: #8539d1;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #6610f2 var(--bg-gradient-end, 180%)) #8539d1;color:#fff}.bg-gradient-cyan-purple{--bslib-color-fg: #fff;--bslib-color-bg: #834ba2;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #613d7c var(--bg-gradient-end, 180%)) #834ba2;color:#fff}.bg-gradient-cyan-pink{--bslib-color-fg: #fff;--bslib-color-bg: #b94ba8;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #e83e8c var(--bg-gradient-end, 180%)) #b94ba8;color:#fff}.bg-gradient-cyan-red{--bslib-color-fg: #fff;--bslib-color-bg: #c23287;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #ff0039 var(--bg-gradient-end, 180%)) #c23287;color:#fff}.bg-gradient-cyan-orange{--bslib-color-fg: #fff;--bslib-color-bg: #bc788f;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #f0ad4e var(--bg-gradient-end, 180%)) #bc788f;color:#fff}.bg-gradient-cyan-yellow{--bslib-color-fg: #fff;--bslib-color-bg: #c2617a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #ff7518 var(--bg-gradient-end, 180%)) #c2617a;color:#fff}.bg-gradient-cyan-green{--bslib-color-fg: #fff;--bslib-color-bg: #757b7a;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #3fb618 var(--bg-gradient-end, 180%)) #757b7a;color:#fff}.bg-gradient-cyan-teal{--bslib-color-fg: #fff;--bslib-color-bg: #6983ad;background:linear-gradient(var(--bg-gradient-deg, 140deg), #9954bb var(--bg-gradient-start, 36%), #20c997 var(--bg-gradient-end, 180%)) #6983ad;color:#fff}:root{--bslib-spacer: 1rem;--bslib-mb-spacer: var(--bslib-spacer, 
1rem)}.bslib-mb-spacing{margin-bottom:var(--bslib-mb-spacer)}.bslib-gap-spacing{gap:var(--bslib-mb-spacer)}.bslib-gap-spacing>.bslib-mb-spacing,.bslib-gap-spacing>.form-group,.bslib-gap-spacing>p,.bslib-gap-spacing>pre{margin-bottom:0}.html-fill-container>.html-fill-item.bslib-mb-spacing{margin-bottom:0}@media(min-width: 576px){.nav:not(.nav-hidden){display:flex !important;display:-webkit-flex !important}.nav:not(.nav-hidden):not(.nav-stacked):not(.flex-column){float:none !important}.nav:not(.nav-hidden):not(.nav-stacked):not(.flex-column)>.bslib-nav-spacer{margin-left:auto !important}.nav:not(.nav-hidden):not(.nav-stacked):not(.flex-column)>.form-inline{margin-top:auto;margin-bottom:auto}.nav:not(.nav-hidden).nav-stacked{flex-direction:column;-webkit-flex-direction:column;height:100%}.nav:not(.nav-hidden).nav-stacked>.bslib-nav-spacer{margin-top:auto !important}}.accordion .accordion-header{font-size:calc(1.29rem + 0.48vw);margin-top:0;margin-bottom:.5rem;font-weight:400;line-height:1.2;color:var(--bs-heading-color);margin-bottom:0}@media(min-width: 1200px){.accordion .accordion-header{font-size:1.65rem}}.accordion .accordion-icon:not(:empty){margin-right:.75rem;display:flex}.accordion .accordion-button:not(.collapsed){box-shadow:none}.accordion .accordion-button:not(.collapsed):focus{box-shadow:var(--bs-accordion-btn-focus-box-shadow)}.bslib-sidebar-layout{--bslib-sidebar-transition-duration: 500ms;--bslib-sidebar-transition-easing-x: cubic-bezier(0.8, 0.78, 0.22, 1.07);--bslib-sidebar-border: var(--bs-card-border-width, 1px) solid var(--bs-card-border-color, rgba(0, 0, 0, 0.175));--bslib-sidebar-border-radius: var(--bs-border-radius);--bslib-sidebar-vert-border: var(--bs-card-border-width, 1px) solid var(--bs-card-border-color, rgba(0, 0, 0, 0.175));--bslib-sidebar-bg: rgba(var(--bs-emphasis-color-rgb, 0, 0, 0), 0.05);--bslib-sidebar-fg: var(--bs-emphasis-color, black);--bslib-sidebar-main-fg: var(--bs-card-color, var(--bs-body-color));--bslib-sidebar-main-bg: 
var(--bs-card-bg, var(--bs-body-bg));--bslib-sidebar-toggle-bg: rgba(var(--bs-emphasis-color-rgb, 0, 0, 0), 0.1);--bslib-sidebar-padding: calc(var(--bslib-spacer) * 1.5);--bslib-sidebar-icon-size: var(--bslib-spacer, 1rem);--bslib-sidebar-icon-button-size: calc(var(--bslib-sidebar-icon-size, 1rem) * 2);--bslib-sidebar-padding-icon: calc(var(--bslib-sidebar-icon-button-size, 2rem) * 1.5);--bslib-collapse-toggle-border-radius: var(--bs-border-radius, 0.25rem);--bslib-collapse-toggle-transform: 0deg;--bslib-sidebar-toggle-transition-easing: cubic-bezier(1, 0, 0, 1);--bslib-collapse-toggle-right-transform: 180deg;--bslib-sidebar-column-main: minmax(0, 1fr);display:grid !important;grid-template-columns:min(100% - var(--bslib-sidebar-icon-size),var(--bslib-sidebar-width, 250px)) var(--bslib-sidebar-column-main);position:relative;transition:grid-template-columns ease-in-out var(--bslib-sidebar-transition-duration);border:var(--bslib-sidebar-border);border-radius:var(--bslib-sidebar-border-radius)}@media(prefers-reduced-motion: reduce){.bslib-sidebar-layout{transition:none}}.bslib-sidebar-layout[data-bslib-sidebar-border=false]{border:none}.bslib-sidebar-layout[data-bslib-sidebar-border-radius=false]{border-radius:initial}.bslib-sidebar-layout>.main,.bslib-sidebar-layout>.sidebar{grid-row:1/2;border-radius:inherit;overflow:auto}.bslib-sidebar-layout>.main{grid-column:2/3;border-top-left-radius:0;border-bottom-left-radius:0;padding:var(--bslib-sidebar-padding);transition:padding var(--bslib-sidebar-transition-easing-x) 
var(--bslib-sidebar-transition-duration);color:var(--bslib-sidebar-main-fg);background-color:var(--bslib-sidebar-main-bg)}.bslib-sidebar-layout>.sidebar{grid-column:1/2;width:100%;height:100%;border-right:var(--bslib-sidebar-vert-border);border-top-right-radius:0;border-bottom-right-radius:0;color:var(--bslib-sidebar-fg);background-color:var(--bslib-sidebar-bg);backdrop-filter:blur(5px)}.bslib-sidebar-layout>.sidebar>.sidebar-content{display:flex;flex-direction:column;gap:var(--bslib-spacer, 1rem);padding:var(--bslib-sidebar-padding);padding-top:var(--bslib-sidebar-padding-icon)}.bslib-sidebar-layout>.sidebar>.sidebar-content>:last-child:not(.sidebar-title){margin-bottom:0}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion{margin-left:calc(-1*var(--bslib-sidebar-padding));margin-right:calc(-1*var(--bslib-sidebar-padding))}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion:last-child{margin-bottom:calc(-1*var(--bslib-sidebar-padding))}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion:not(:last-child){margin-bottom:1rem}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion .accordion-body{display:flex;flex-direction:column}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion:not(:first-child) .accordion-item:first-child{border-top:var(--bs-accordion-border-width) solid var(--bs-accordion-border-color)}.bslib-sidebar-layout>.sidebar>.sidebar-content>.accordion:not(:last-child) .accordion-item:last-child{border-bottom:var(--bs-accordion-border-width) solid var(--bs-accordion-border-color)}.bslib-sidebar-layout>.sidebar>.sidebar-content.has-accordion>.sidebar-title{border-bottom:none;padding-bottom:0}.bslib-sidebar-layout>.sidebar 
.shiny-input-container{width:100%}.bslib-sidebar-layout[data-bslib-sidebar-open=always]>.sidebar>.sidebar-content{padding-top:var(--bslib-sidebar-padding)}.bslib-sidebar-layout>.collapse-toggle{grid-row:1/2;grid-column:1/2;display:inline-flex;align-items:center;position:absolute;right:calc(var(--bslib-sidebar-icon-size));top:calc(var(--bslib-sidebar-icon-size, 1rem)/2);border:none;border-radius:var(--bslib-collapse-toggle-border-radius);height:var(--bslib-sidebar-icon-button-size, 2rem);width:var(--bslib-sidebar-icon-button-size, 2rem);display:flex;align-items:center;justify-content:center;padding:0;color:var(--bslib-sidebar-fg);background-color:unset;transition:color var(--bslib-sidebar-transition-easing-x) var(--bslib-sidebar-transition-duration),top var(--bslib-sidebar-transition-easing-x) var(--bslib-sidebar-transition-duration),right var(--bslib-sidebar-transition-easing-x) var(--bslib-sidebar-transition-duration),left var(--bslib-sidebar-transition-easing-x) var(--bslib-sidebar-transition-duration)}.bslib-sidebar-layout>.collapse-toggle:hover{background-color:var(--bslib-sidebar-toggle-bg)}.bslib-sidebar-layout>.collapse-toggle>.collapse-icon{opacity:.8;width:var(--bslib-sidebar-icon-size);height:var(--bslib-sidebar-icon-size);transform:rotateY(var(--bslib-collapse-toggle-transform));transition:transform var(--bslib-sidebar-toggle-transition-easing) var(--bslib-sidebar-transition-duration)}.bslib-sidebar-layout>.collapse-toggle:hover>.collapse-icon{opacity:1}.bslib-sidebar-layout .sidebar-title{font-size:1.25rem;line-height:1.25;margin-top:0;margin-bottom:1rem;padding-bottom:1rem;border-bottom:var(--bslib-sidebar-border)}.bslib-sidebar-layout.sidebar-right{grid-template-columns:var(--bslib-sidebar-column-main) min(100% - var(--bslib-sidebar-icon-size),var(--bslib-sidebar-width, 
250px))}.bslib-sidebar-layout.sidebar-right>.main{grid-column:1/2;border-top-right-radius:0;border-bottom-right-radius:0;border-top-left-radius:inherit;border-bottom-left-radius:inherit}.bslib-sidebar-layout.sidebar-right>.sidebar{grid-column:2/3;border-right:none;border-left:var(--bslib-sidebar-vert-border);border-top-left-radius:0;border-bottom-left-radius:0}.bslib-sidebar-layout.sidebar-right>.collapse-toggle{grid-column:2/3;left:var(--bslib-sidebar-icon-size);right:unset;border:var(--bslib-collapse-toggle-border)}.bslib-sidebar-layout.sidebar-right>.collapse-toggle>.collapse-icon{transform:rotateY(var(--bslib-collapse-toggle-right-transform))}.bslib-sidebar-layout.sidebar-collapsed{--bslib-collapse-toggle-transform: 180deg;--bslib-collapse-toggle-right-transform: 0deg;--bslib-sidebar-vert-border: none;grid-template-columns:0 minmax(0, 1fr)}.bslib-sidebar-layout.sidebar-collapsed.sidebar-right{grid-template-columns:minmax(0, 1fr) 0}.bslib-sidebar-layout.sidebar-collapsed:not(.transitioning)>.sidebar>*{display:none}.bslib-sidebar-layout.sidebar-collapsed>.main{border-radius:inherit}.bslib-sidebar-layout.sidebar-collapsed:not(.sidebar-right)>.main{padding-left:var(--bslib-sidebar-padding-icon)}.bslib-sidebar-layout.sidebar-collapsed.sidebar-right>.main{padding-right:var(--bslib-sidebar-padding-icon)}.bslib-sidebar-layout.sidebar-collapsed>.collapse-toggle{color:var(--bslib-sidebar-main-fg);top:calc(var(--bslib-sidebar-overlap-counter, 0)*(var(--bslib-sidebar-icon-size) + var(--bslib-sidebar-padding)) + var(--bslib-sidebar-icon-size, 1rem)/2);right:calc(-2.5*var(--bslib-sidebar-icon-size) - var(--bs-card-border-width, 1px))}.bslib-sidebar-layout.sidebar-collapsed.sidebar-right>.collapse-toggle{left:calc(-2.5*var(--bslib-sidebar-icon-size) - var(--bs-card-border-width, 1px));right:unset}@media(min-width: 576px){.bslib-sidebar-layout.transitioning>.sidebar>.sidebar-content{display:none}}@media(max-width: 
575.98px){.bslib-sidebar-layout[data-bslib-sidebar-open=desktop]{--bslib-sidebar-js-init-collapsed: true}.bslib-sidebar-layout>.sidebar,.bslib-sidebar-layout.sidebar-right>.sidebar{border:none}.bslib-sidebar-layout>.main,.bslib-sidebar-layout.sidebar-right>.main{grid-column:1/3}.bslib-sidebar-layout[data-bslib-sidebar-open=always]{display:block !important}.bslib-sidebar-layout[data-bslib-sidebar-open=always]>.sidebar{max-height:var(--bslib-sidebar-max-height-mobile);overflow-y:auto;border-top:var(--bslib-sidebar-vert-border)}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]){grid-template-columns:100% 0}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]):not(.sidebar-collapsed)>.sidebar{z-index:1}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]):not(.sidebar-collapsed)>.collapse-toggle{z-index:1}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]).sidebar-right{grid-template-columns:0 100%}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]).sidebar-collapsed{grid-template-columns:0 100%}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]).sidebar-collapsed.sidebar-right{grid-template-columns:100% 0}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]):not(.sidebar-right)>.main{padding-left:var(--bslib-sidebar-padding-icon)}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]).sidebar-right>.main{padding-right:var(--bslib-sidebar-padding-icon)}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always])>.main{opacity:0;transition:opacity var(--bslib-sidebar-transition-easing-x) var(--bslib-sidebar-transition-duration)}.bslib-sidebar-layout:not([data-bslib-sidebar-open=always]).sidebar-collapsed>.main{opacity:1}}:root{--bslib-page-sidebar-title-bg: #5c2983;--bslib-page-sidebar-title-color: #fff}.bslib-page-title{background-color:var(--bslib-page-sidebar-title-bg);color:var(--bslib-page-sidebar-title-color);font-size:1.25rem;font-weight:300;padding:var(--bslib-spacer, 
1rem);padding-left:1.5rem;margin-bottom:0;border-bottom:1px solid #dee2e6}html{height:100%}.bslib-page-fill{width:100%;height:100%;margin:0;padding:var(--bslib-spacer, 1rem);gap:var(--bslib-spacer, 1rem)}@media(max-width: 575.98px){.bslib-page-fill{height:var(--bslib-page-fill-mobile-height, auto)}}:root{--bslib-value-box-shadow: none;--bslib-value-box-border-width-auto-yes: var(--bslib-value-box-border-width-baseline);--bslib-value-box-border-width-auto-no: 0;--bslib-value-box-border-width-baseline: 1px}.bslib-value-box{border-width:var(--bslib-value-box-border-width-auto-no, var(--bslib-value-box-border-width-baseline));container-name:bslib-value-box;container-type:inline-size}.bslib-value-box.card{box-shadow:var(--bslib-value-box-shadow)}.bslib-value-box.border-auto{border-width:var(--bslib-value-box-border-width-auto-yes, var(--bslib-value-box-border-width-baseline))}.bslib-value-box.default{--bslib-value-box-bg-default: var(--bs-card-bg, #fff);--bslib-value-box-border-color-default: var(--bs-card-border-color, rgba(0, 0, 0, 0.175));color:var(--bslib-value-box-color);background-color:var(--bslib-value-box-bg, var(--bslib-value-box-bg-default));border-color:var(--bslib-value-box-border-color, var(--bslib-value-box-border-color-default))}.bslib-value-box .value-box-grid{display:grid;grid-template-areas:"left right";align-items:center;overflow:hidden}.bslib-value-box .value-box-showcase{height:100%;max-height:var(---bslib-value-box-showcase-max-h, 100%)}.bslib-value-box .value-box-showcase,.bslib-value-box .value-box-showcase>.html-fill-item{width:100%}.bslib-value-box[data-full-screen=true] .value-box-showcase{max-height:var(---bslib-value-box-showcase-max-h-fs, 100%)}@media screen and (min-width: 575.98px){@container bslib-value-box (max-width: 300px){.bslib-value-box:not(.showcase-bottom) .value-box-grid{grid-template-columns:1fr !important;grid-template-rows:auto auto;grid-template-areas:"top" "bottom"}.bslib-value-box:not(.showcase-bottom) .value-box-grid 
.value-box-showcase{grid-area:top !important}.bslib-value-box:not(.showcase-bottom) .value-box-grid .value-box-area{grid-area:bottom !important;justify-content:end}}}.bslib-value-box .value-box-area{justify-content:center;padding:1.5rem 1rem;font-size:.9rem;font-weight:500}.bslib-value-box .value-box-area *{margin-bottom:0;margin-top:0}.bslib-value-box .value-box-title{font-size:1rem;margin-top:0;margin-bottom:.5rem;font-weight:400;line-height:1.2}.bslib-value-box .value-box-title:empty::after{content:" "}.bslib-value-box .value-box-value{font-size:calc(1.29rem + 0.48vw);margin-top:0;margin-bottom:.5rem;font-weight:400;line-height:1.2}@media(min-width: 1200px){.bslib-value-box .value-box-value{font-size:1.65rem}}.bslib-value-box .value-box-value:empty::after{content:" "}.bslib-value-box .value-box-showcase{align-items:center;justify-content:center;margin-top:auto;margin-bottom:auto;padding:1rem}.bslib-value-box .value-box-showcase .bi,.bslib-value-box .value-box-showcase .fa,.bslib-value-box .value-box-showcase .fab,.bslib-value-box .value-box-showcase .fas,.bslib-value-box .value-box-showcase .far{opacity:.85;min-width:50px;max-width:125%}.bslib-value-box .value-box-showcase .bi,.bslib-value-box .value-box-showcase .fa,.bslib-value-box .value-box-showcase .fab,.bslib-value-box .value-box-showcase .fas,.bslib-value-box .value-box-showcase .far{font-size:4rem}.bslib-value-box.showcase-top-right .value-box-grid{grid-template-columns:1fr var(---bslib-value-box-showcase-w, 50%)}.bslib-value-box.showcase-top-right .value-box-grid .value-box-showcase{grid-area:right;margin-left:auto;align-self:start;align-items:end;padding-left:0;padding-bottom:0}.bslib-value-box.showcase-top-right .value-box-grid .value-box-area{grid-area:left;align-self:end}.bslib-value-box.showcase-top-right[data-full-screen=true] .value-box-grid{grid-template-columns:auto var(---bslib-value-box-showcase-w-fs, 1fr)}.bslib-value-box.showcase-top-right[data-full-screen=true] 
.value-box-grid>div{align-self:center}.bslib-value-box.showcase-top-right:not([data-full-screen=true]) .value-box-showcase{margin-top:0}@container bslib-value-box (max-width: 300px){.bslib-value-box.showcase-top-right:not([data-full-screen=true]) .value-box-grid .value-box-showcase{padding-left:1rem}}.bslib-value-box.showcase-left-center .value-box-grid{grid-template-columns:var(---bslib-value-box-showcase-w, 30%) auto}.bslib-value-box.showcase-left-center[data-full-screen=true] .value-box-grid{grid-template-columns:var(---bslib-value-box-showcase-w-fs, 1fr) auto}.bslib-value-box.showcase-left-center:not([data-fill-screen=true]) .value-box-grid .value-box-showcase{grid-area:left}.bslib-value-box.showcase-left-center:not([data-fill-screen=true]) .value-box-grid .value-box-area{grid-area:right}.bslib-value-box.showcase-bottom .value-box-grid{grid-template-columns:1fr;grid-template-rows:1fr var(---bslib-value-box-showcase-h, auto);grid-template-areas:"top" "bottom";overflow:hidden}.bslib-value-box.showcase-bottom .value-box-grid .value-box-showcase{grid-area:bottom;padding:0;margin:0}.bslib-value-box.showcase-bottom .value-box-grid .value-box-area{grid-area:top}.bslib-value-box.showcase-bottom[data-full-screen=true] .value-box-grid{grid-template-rows:1fr var(---bslib-value-box-showcase-h-fs, 2fr)}.bslib-value-box.showcase-bottom[data-full-screen=true] .value-box-grid .value-box-showcase{padding:1rem}[data-bs-theme=dark] .bslib-value-box{--bslib-value-box-shadow: 0 0.5rem 1rem rgb(0 0 0 / 50%)}.bslib-card{overflow:auto}.bslib-card .card-body+.card-body{padding-top:0}.bslib-card .card-body{overflow:auto}.bslib-card .card-body p{margin-top:0}.bslib-card .card-body p:last-child{margin-bottom:0}.bslib-card .card-body{max-height:var(--bslib-card-body-max-height, none)}.bslib-card[data-full-screen=true]>.card-body{max-height:var(--bslib-card-body-max-height-full-screen, none)}.bslib-card .card-header .form-group{margin-bottom:0}.bslib-card .card-header 
.selectize-control{margin-bottom:0}.bslib-card .card-header .selectize-control .item{margin-right:1.15rem}.bslib-card .card-footer{margin-top:auto}.bslib-card .bslib-navs-card-title{display:flex;flex-wrap:wrap;justify-content:space-between;align-items:center}.bslib-card .bslib-navs-card-title .nav{margin-left:auto}.bslib-card .bslib-sidebar-layout:not([data-bslib-sidebar-border=true]){border:none}.bslib-card .bslib-sidebar-layout:not([data-bslib-sidebar-border-radius=true]){border-top-left-radius:0;border-top-right-radius:0}[data-full-screen=true]{position:fixed;inset:3.5rem 1rem 1rem;height:auto !important;max-height:none !important;width:auto !important;z-index:1070}.bslib-full-screen-enter{display:none;position:absolute;bottom:var(--bslib-full-screen-enter-bottom, 0.2rem);right:var(--bslib-full-screen-enter-right, 0);top:var(--bslib-full-screen-enter-top);left:var(--bslib-full-screen-enter-left);color:var(--bslib-color-fg, var(--bs-card-color));background-color:var(--bslib-color-bg, var(--bs-card-bg, var(--bs-body-bg)));border:var(--bs-card-border-width) solid var(--bslib-color-fg, var(--bs-card-border-color));box-shadow:0 2px 4px rgba(0,0,0,.15);margin:.2rem .4rem;padding:.55rem !important;font-size:.8rem;cursor:pointer;opacity:.7;z-index:1070}.bslib-full-screen-enter:hover{opacity:1}.card[data-full-screen=false]:hover>*>.bslib-full-screen-enter{display:block}.bslib-has-full-screen .card:hover>*>.bslib-full-screen-enter{display:none}@media(max-width: 575.98px){.bslib-full-screen-enter{display:none !important}}.bslib-full-screen-exit{position:relative;top:1.35rem;font-size:.9rem;cursor:pointer;text-decoration:none;display:flex;float:right;margin-right:2.15rem;align-items:center;color:rgba(var(--bs-body-bg-rgb), 0.8)}.bslib-full-screen-exit:hover{color:rgba(var(--bs-body-bg-rgb), 1)}.bslib-full-screen-exit svg{margin-left:.5rem;font-size:1.5rem}#bslib-full-screen-overlay{position:fixed;inset:0;background-color:rgba(var(--bs-body-color-rgb), 
0.6);backdrop-filter:blur(2px);-webkit-backdrop-filter:blur(2px);z-index:1069;animation:bslib-full-screen-overlay-enter 400ms cubic-bezier(0.6, 0.02, 0.65, 1) forwards}@keyframes bslib-full-screen-overlay-enter{0%{opacity:0}100%{opacity:1}}.navbar+.container-fluid:has(>.tab-content>.tab-pane.active.html-fill-container),.navbar+.container-sm:has(>.tab-content>.tab-pane.active.html-fill-container),.navbar+.container-md:has(>.tab-content>.tab-pane.active.html-fill-container),.navbar+.container-lg:has(>.tab-content>.tab-pane.active.html-fill-container),.navbar+.container-xl:has(>.tab-content>.tab-pane.active.html-fill-container),.navbar+.container-xxl:has(>.tab-content>.tab-pane.active.html-fill-container){padding-left:0;padding-right:0}.navbar+.container-fluid>.tab-content>.tab-pane.active.html-fill-container,.navbar+.container-sm>.tab-content>.tab-pane.active.html-fill-container,.navbar+.container-md>.tab-content>.tab-pane.active.html-fill-container,.navbar+.container-lg>.tab-content>.tab-pane.active.html-fill-container,.navbar+.container-xl>.tab-content>.tab-pane.active.html-fill-container,.navbar+.container-xxl>.tab-content>.tab-pane.active.html-fill-container{padding:var(--bslib-spacer, 1rem);gap:var(--bslib-spacer, 
1rem)}.navbar+.container-fluid>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child),.navbar+.container-sm>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child),.navbar+.container-md>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child),.navbar+.container-lg>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child),.navbar+.container-xl>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child),.navbar+.container-xxl>.tab-content>.tab-pane.active.html-fill-container:has(>.bslib-sidebar-layout:only-child){padding:0}.navbar+.container-fluid>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]),.navbar+.container-sm>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]),.navbar+.container-md>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]),.navbar+.container-lg>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]),.navbar+.container-xl>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]),.navbar+.container-xxl>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border=true]){border-left:none;border-right:none;border-bottom:none}.navbar+.container-fluid>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border-radius=true]),.navbar+.container-sm>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border-radius=true]),.navbar+.container-md>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:o
nly-child:not([data-bslib-sidebar-border-radius=true]),.navbar+.container-lg>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border-radius=true]),.navbar+.container-xl>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border-radius=true]),.navbar+.container-xxl>.tab-content>.tab-pane.active.html-fill-container>.bslib-sidebar-layout:only-child:not([data-bslib-sidebar-border-radius=true]){border-radius:0}.navbar+div>.bslib-sidebar-layout{border-top:var(--bslib-sidebar-border)}.bslib-grid{display:grid !important;gap:var(--bslib-spacer, 1rem);height:var(--bslib-grid-height)}.bslib-grid.grid{grid-template-columns:repeat(var(--bs-columns, 12), minmax(0, 1fr));grid-template-rows:unset;grid-auto-rows:var(--bslib-grid--row-heights);--bslib-grid--row-heights--xs: unset;--bslib-grid--row-heights--sm: unset;--bslib-grid--row-heights--md: unset;--bslib-grid--row-heights--lg: unset;--bslib-grid--row-heights--xl: unset;--bslib-grid--row-heights--xxl: unset}.bslib-grid.grid.bslib-grid--row-heights--xs{--bslib-grid--row-heights: var(--bslib-grid--row-heights--xs)}@media(min-width: 576px){.bslib-grid.grid.bslib-grid--row-heights--sm{--bslib-grid--row-heights: var(--bslib-grid--row-heights--sm)}}@media(min-width: 768px){.bslib-grid.grid.bslib-grid--row-heights--md{--bslib-grid--row-heights: var(--bslib-grid--row-heights--md)}}@media(min-width: 992px){.bslib-grid.grid.bslib-grid--row-heights--lg{--bslib-grid--row-heights: var(--bslib-grid--row-heights--lg)}}@media(min-width: 1200px){.bslib-grid.grid.bslib-grid--row-heights--xl{--bslib-grid--row-heights: var(--bslib-grid--row-heights--xl)}}@media(min-width: 1400px){.bslib-grid.grid.bslib-grid--row-heights--xxl{--bslib-grid--row-heights: var(--bslib-grid--row-heights--xxl)}}.bslib-grid>*>.shiny-input-container{width:100%}.bslib-grid-item{grid-column:auto/span 1}@media(max-width: 
767.98px){.bslib-grid-item{grid-column:1/-1}}@media(max-width: 575.98px){.bslib-grid{grid-template-columns:1fr !important;height:var(--bslib-grid-height-mobile)}.bslib-grid.grid{height:unset !important;grid-auto-rows:var(--bslib-grid--row-heights--xs, auto)}}.html-fill-container{display:flex;flex-direction:column;min-height:0;min-width:0}.html-fill-container>.html-fill-item{flex:1 1 auto;min-height:0;min-width:0}.html-fill-container>:not(.html-fill-item){flex:0 0 auto}.quarto-container{min-height:calc(100vh - 132px)}body.hypothesis-enabled #quarto-header{margin-right:16px}footer.footer .nav-footer,#quarto-header>nav{padding-left:1em;padding-right:1em}footer.footer div.nav-footer p:first-child{margin-top:0}footer.footer div.nav-footer p:last-child{margin-bottom:0}#quarto-content>*{padding-top:14px}#quarto-content>#quarto-sidebar-glass{padding-top:0px}@media(max-width: 991.98px){#quarto-content>*{padding-top:0}#quarto-content .subtitle{padding-top:14px}#quarto-content section:first-of-type h2:first-of-type,#quarto-content section:first-of-type .h2:first-of-type{margin-top:1rem}}.headroom-target,header.headroom{will-change:transform;transition:position 200ms linear;transition:all 200ms linear}header.headroom--pinned{transform:translateY(0%)}header.headroom--unpinned{transform:translateY(-100%)}.navbar-container{width:100%}.navbar-brand{overflow:hidden;text-overflow:ellipsis}.navbar-brand-container{max-width:calc(100% - 115px);min-width:0;display:flex;align-items:center}@media(min-width: 992px){.navbar-brand-container{margin-right:1em}}.navbar-brand.navbar-brand-logo{margin-right:4px;display:inline-flex}.navbar-toggler{flex-basis:content;flex-shrink:0}.navbar .navbar-brand-container{order:2}.navbar .navbar-toggler{order:1}.navbar .navbar-container>.navbar-nav{order:20}.navbar .navbar-container>.navbar-brand-container{margin-left:0 !important;margin-right:0 !important}.navbar .navbar-collapse{order:20}.navbar #quarto-search{order:4;margin-left:auto}.navbar 
.navbar-toggler{margin-right:.5em}.navbar-logo{max-height:24px;width:auto;padding-right:4px}nav .nav-item:not(.compact){padding-top:1px}nav .nav-link i,nav .dropdown-item i{padding-right:1px}.navbar-expand-lg .navbar-nav .nav-link{padding-left:.6rem;padding-right:.6rem}nav .nav-item.compact .nav-link{padding-left:.5rem;padding-right:.5rem;font-size:1.1rem}.navbar .quarto-navbar-tools{order:3}.navbar .quarto-navbar-tools div.dropdown{display:inline-block}.navbar .quarto-navbar-tools .quarto-navigation-tool{color:#e0d6e7}.navbar .quarto-navbar-tools .quarto-navigation-tool:hover{color:#b6e5b0}.navbar-nav .dropdown-menu{min-width:220px;font-size:.9rem}.navbar .navbar-nav .nav-link.dropdown-toggle::after{opacity:.75;vertical-align:.175em}.navbar ul.dropdown-menu{padding-top:0;padding-bottom:0}.navbar .dropdown-header{text-transform:uppercase;font-size:.8rem;padding:0 .5rem}.navbar .dropdown-item{padding:.4rem .5rem}.navbar .dropdown-item>i.bi{margin-left:.1rem;margin-right:.25em}.sidebar #quarto-search{margin-top:-1px}.sidebar #quarto-search svg.aa-SubmitIcon{width:16px;height:16px}.sidebar-navigation a{color:inherit}.sidebar-title{margin-top:.25rem;padding-bottom:.5rem;font-size:1.3rem;line-height:1.6rem;visibility:visible}.sidebar-title>a{font-size:inherit;text-decoration:none}.sidebar-title .sidebar-tools-main{margin-top:-6px}@media(max-width: 991.98px){#quarto-sidebar div.sidebar-header{padding-top:.2em}}.sidebar-header-stacked .sidebar-title{margin-top:.6rem}.sidebar-logo{max-width:90%;padding-bottom:.5rem}.sidebar-logo-link{text-decoration:none}.sidebar-navigation li a{text-decoration:none}.sidebar-navigation .quarto-navigation-tool{opacity:.7;font-size:.875rem}#quarto-sidebar>nav>.sidebar-tools-main{margin-left:14px}.sidebar-tools-main{display:inline-flex;margin-left:0px;order:2}.sidebar-tools-main:not(.tools-wide){vertical-align:middle}.sidebar-navigation 
.quarto-navigation-tool.dropdown-toggle::after{display:none}.sidebar.sidebar-navigation>*{padding-top:1em}.sidebar-item{margin-bottom:.2em;line-height:1rem;margin-top:.4rem}.sidebar-section{padding-left:.5em;padding-bottom:.2em}.sidebar-item .sidebar-item-container{display:flex;justify-content:space-between;cursor:pointer}.sidebar-item-toggle:hover{cursor:pointer}.sidebar-item .sidebar-item-toggle .bi{font-size:.7rem;text-align:center}.sidebar-item .sidebar-item-toggle .bi-chevron-right::before{transition:transform 200ms ease}.sidebar-item .sidebar-item-toggle[aria-expanded=false] .bi-chevron-right::before{transform:none}.sidebar-item .sidebar-item-toggle[aria-expanded=true] .bi-chevron-right::before{transform:rotate(90deg)}.sidebar-item-text{width:100%}.sidebar-navigation .sidebar-divider{margin-left:0;margin-right:0;margin-top:.5rem;margin-bottom:.5rem}@media(max-width: 991.98px){.quarto-secondary-nav{display:block}.quarto-secondary-nav button.quarto-search-button{padding-right:0em;padding-left:2em}.quarto-secondary-nav button.quarto-btn-toggle{margin-left:-0.75rem;margin-right:.15rem}.quarto-secondary-nav nav.quarto-title-breadcrumbs{display:none}.quarto-secondary-nav nav.quarto-page-breadcrumbs{display:flex;align-items:center;padding-right:1em;margin-left:-0.25em}.quarto-secondary-nav nav.quarto-page-breadcrumbs a{text-decoration:none}.quarto-secondary-nav nav.quarto-page-breadcrumbs ol.breadcrumb{margin-bottom:0}}@media(min-width: 992px){.quarto-secondary-nav{display:none}}.quarto-title-breadcrumbs .breadcrumb{margin-bottom:.5em;font-size:.9rem}.quarto-title-breadcrumbs .breadcrumb li:last-of-type a{color:#6c757d}.quarto-secondary-nav .quarto-btn-toggle{color:#595959}.quarto-secondary-nav[aria-expanded=false] .quarto-btn-toggle .bi-chevron-right::before{transform:none}.quarto-secondary-nav[aria-expanded=true] .quarto-btn-toggle .bi-chevron-right::before{transform:rotate(90deg)}.quarto-secondary-nav .quarto-btn-toggle 
.bi-chevron-right::before{transition:transform 200ms ease}.quarto-secondary-nav{cursor:pointer}.no-decor{text-decoration:none}.quarto-secondary-nav-title{margin-top:.3em;color:#595959;padding-top:4px}.quarto-secondary-nav nav.quarto-page-breadcrumbs{color:#595959}.quarto-secondary-nav nav.quarto-page-breadcrumbs a{color:#595959}.quarto-secondary-nav nav.quarto-page-breadcrumbs a:hover{color:rgba(35,102,26,.8)}.quarto-secondary-nav nav.quarto-page-breadcrumbs .breadcrumb-item::before{color:#8c8c8c}.breadcrumb-item{line-height:1.2rem}div.sidebar-item-container{color:#595959}div.sidebar-item-container:hover,div.sidebar-item-container:focus{color:rgba(35,102,26,.8)}div.sidebar-item-container.disabled{color:rgba(89,89,89,.75)}div.sidebar-item-container .active,div.sidebar-item-container .show>.nav-link,div.sidebar-item-container .sidebar-link>code{color:#23661a}div.sidebar.sidebar-navigation.rollup.quarto-sidebar-toggle-contents,nav.sidebar.sidebar-navigation:not(.rollup){background-color:#fff}@media(max-width: 991.98px){.sidebar-navigation .sidebar-item a,.nav-page .nav-page-text,.sidebar-navigation{font-size:1rem}.sidebar-navigation ul.sidebar-section.depth1 .sidebar-section-item{font-size:1.1rem}.sidebar-logo{display:none}.sidebar.sidebar-navigation{position:static;border-bottom:1px solid #dee2e6}.sidebar.sidebar-navigation.collapsing{position:fixed;z-index:1000}.sidebar.sidebar-navigation.show{position:fixed;z-index:1000}.sidebar.sidebar-navigation{min-height:100%}nav.quarto-secondary-nav{background-color:#fff;border-bottom:1px solid #dee2e6}.quarto-banner nav.quarto-secondary-nav{background-color:#5c2983;color:#e0d6e7;border-top:1px solid #dee2e6}.sidebar .sidebar-footer{visibility:visible;padding-top:1rem;position:inherit}.sidebar-tools-collapse{display:block}}#quarto-sidebar{transition:width .15s ease-in}#quarto-sidebar>*{padding-right:1em}@media(max-width: 991.98px){#quarto-sidebar 
.sidebar-menu-container{white-space:nowrap;min-width:225px}#quarto-sidebar.show{transition:width .15s ease-out}}@media(min-width: 992px){#quarto-sidebar{display:flex;flex-direction:column}.nav-page .nav-page-text,.sidebar-navigation .sidebar-section .sidebar-item{font-size:.875rem}.sidebar-navigation .sidebar-item{font-size:.925rem}.sidebar.sidebar-navigation{display:block;position:sticky}.sidebar-search{width:100%}.sidebar .sidebar-footer{visibility:visible}}@media(max-width: 991.98px){#quarto-sidebar-glass{position:fixed;top:0;bottom:0;left:0;right:0;background-color:rgba(255,255,255,0);transition:background-color .15s ease-in;z-index:-1}#quarto-sidebar-glass.collapsing{z-index:1000}#quarto-sidebar-glass.show{transition:background-color .15s ease-out;background-color:rgba(102,102,102,.4);z-index:1000}}.sidebar .sidebar-footer{padding:.5rem 1rem;align-self:flex-end;color:#6c757d;width:100%}.quarto-page-breadcrumbs .breadcrumb-item+.breadcrumb-item,.quarto-page-breadcrumbs .breadcrumb-item{padding-right:.33em;padding-left:0}.quarto-page-breadcrumbs .breadcrumb-item::before{padding-right:.33em}.quarto-sidebar-footer{font-size:.875em}.sidebar-section .bi-chevron-right{vertical-align:middle}.sidebar-section .bi-chevron-right::before{font-size:.9em}.notransition{-webkit-transition:none !important;-moz-transition:none !important;-o-transition:none !important;transition:none !important}.btn:focus:not(:focus-visible){box-shadow:none}.page-navigation{display:flex;justify-content:space-between}.nav-page{padding-bottom:.75em}.nav-page .bi{font-size:1.8rem;vertical-align:middle}.nav-page .nav-page-text{padding-left:.25em;padding-right:.25em}.nav-page a{color:#6c757d;text-decoration:none;display:flex;align-items:center}.nav-page a:hover{color:#339526}.nav-footer .toc-actions{padding-bottom:.5em;padding-top:.5em}.nav-footer .toc-actions a,.nav-footer .toc-actions a:hover{text-decoration:none}.nav-footer .toc-actions ul{display:flex;list-style:none}.nav-footer .toc-actions ul 
:first-child{margin-left:auto}.nav-footer .toc-actions ul :last-child{margin-right:auto}.nav-footer .toc-actions ul li{padding-right:1.5em}.nav-footer .toc-actions ul li i.bi{padding-right:.4em}.nav-footer .toc-actions ul li:last-of-type{padding-right:0}.nav-footer{display:flex;flex-direction:row;flex-wrap:wrap;justify-content:space-between;align-items:baseline;text-align:center;padding-top:.5rem;padding-bottom:.5rem;background-color:#fff}body.nav-fixed{padding-top:64px}.nav-footer-contents{color:#6c757d;margin-top:.25rem}.nav-footer{min-height:3.5em;color:#757575}.nav-footer a{color:#757575}.nav-footer .nav-footer-left{font-size:.825em}.nav-footer .nav-footer-center{font-size:.825em}.nav-footer .nav-footer-right{font-size:.825em}.nav-footer-left .footer-items,.nav-footer-center .footer-items,.nav-footer-right .footer-items{display:inline-flex;padding-top:.3em;padding-bottom:.3em;margin-bottom:0em}.nav-footer-left .footer-items .nav-link,.nav-footer-center .footer-items .nav-link,.nav-footer-right .footer-items .nav-link{padding-left:.6em;padding-right:.6em}.nav-footer-left{flex:1 1 0px;text-align:left}.nav-footer-right{flex:1 1 0px;text-align:right}.nav-footer-center{flex:1 1 0px;min-height:3em;text-align:center}.nav-footer-center .footer-items{justify-content:center}@media(max-width: 767.98px){.nav-footer-center{margin-top:3em}}.navbar .quarto-reader-toggle.reader .quarto-reader-toggle-btn{background-color:#e0d6e7;border-radius:3px}@media(max-width: 991.98px){.quarto-reader-toggle{display:none}}.quarto-reader-toggle.reader.quarto-navigation-tool .quarto-reader-toggle-btn{background-color:#595959;border-radius:3px}.quarto-reader-toggle .quarto-reader-toggle-btn{display:inline-flex;padding-left:.2em;padding-right:.2em;margin-left:-0.2em;margin-right:-0.2em;text-align:center}.navbar .quarto-reader-toggle:not(.reader) .bi::before{background-image:url('data:image/svg+xml,')}.navbar .quarto-reader-toggle.reader 
.bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-reader-toggle:not(.reader) .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-reader-toggle.reader .bi::before{background-image:url('data:image/svg+xml,')}#quarto-back-to-top{display:none;position:fixed;bottom:50px;background-color:#fff;border-radius:.25rem;box-shadow:0 .2rem .5rem #6c757d,0 0 .05rem #6c757d;color:#6c757d;text-decoration:none;font-size:.9em;text-align:center;left:50%;padding:.4rem .8rem;transform:translate(-50%, 0)}.aa-DetachedSearchButtonQuery{display:none}.aa-DetachedOverlay ul.aa-List,#quarto-search-results ul.aa-List{list-style:none;padding-left:0}.aa-DetachedOverlay .aa-Panel,#quarto-search-results .aa-Panel{background-color:#fff;position:absolute;z-index:2000}#quarto-search-results .aa-Panel{max-width:400px}#quarto-search input{font-size:.925rem}@media(min-width: 992px){.navbar #quarto-search{margin-left:.25rem;order:999}}.navbar.navbar-expand-sm #quarto-search,.navbar.navbar-expand-md #quarto-search{order:999}@media(min-width: 992px){.navbar .quarto-navbar-tools{order:900}}@media(min-width: 992px){.navbar .quarto-navbar-tools.tools-end{margin-left:auto !important}}@media(max-width: 991.98px){#quarto-sidebar .sidebar-search{display:none}}#quarto-sidebar .sidebar-search .aa-Autocomplete{width:100%}.navbar .aa-Autocomplete .aa-Form{width:180px}.navbar #quarto-search.type-overlay .aa-Autocomplete{width:40px}.navbar #quarto-search.type-overlay .aa-Autocomplete .aa-Form{background-color:inherit;border:none}.navbar #quarto-search.type-overlay .aa-Autocomplete .aa-Form:focus-within{box-shadow:none;outline:none}.navbar #quarto-search.type-overlay .aa-Autocomplete .aa-Form .aa-InputWrapper{display:none}.navbar #quarto-search.type-overlay .aa-Autocomplete .aa-Form .aa-InputWrapper:focus-within{display:inherit}.navbar #quarto-search.type-overlay .aa-Autocomplete .aa-Form .aa-Label svg,.navbar #quarto-search.type-overlay .aa-Autocomplete 
.aa-Form .aa-LoadingIndicator svg{width:26px;height:26px;color:#e0d6e7;opacity:1}.navbar #quarto-search.type-overlay .aa-Autocomplete svg.aa-SubmitIcon{width:26px;height:26px;color:#e0d6e7;opacity:1}.aa-Autocomplete .aa-Form,.aa-DetachedFormContainer .aa-Form{align-items:center;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem;color:#343a40;display:flex;line-height:1em;margin:0;position:relative;width:100%}.aa-Autocomplete .aa-Form:focus-within,.aa-DetachedFormContainer .aa-Form:focus-within{box-shadow:rgba(92,41,131,.6) 0 0 0 1px;outline:currentColor none medium}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix{align-items:center;display:flex;flex-shrink:0;order:1}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-Label,.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-Label,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator{cursor:initial;flex-shrink:0;padding:0;text-align:left}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-Label svg,.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator svg,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-Label svg,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator svg{color:#343a40;opacity:.5}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-SubmitButton,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-SubmitButton{appearance:none;background:none;border:0;margin:0}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator{align-items:center;display:flex;justify-content:center}.aa-Autocomplete .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator[hidden],.aa-DetachedFormContainer .aa-Form .aa-InputWrapperPrefix .aa-LoadingIndicator[hidden]{display:none}.aa-Autocomplete .aa-Form 
.aa-InputWrapper,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper{order:3;position:relative;width:100%}.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input{appearance:none;background:none;border:0;color:#343a40;font:inherit;height:calc(1.5em + .1rem + 2px);padding:0;width:100%}.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input::placeholder,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input::placeholder{color:#343a40;opacity:.8}.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input:focus,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input:focus{border-color:none;box-shadow:none;outline:none}.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-decoration,.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-cancel-button,.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-results-button,.aa-Autocomplete .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-results-decoration,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-decoration,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-cancel-button,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-results-button,.aa-DetachedFormContainer .aa-Form .aa-InputWrapper .aa-Input::-webkit-search-results-decoration{display:none}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix{align-items:center;display:flex;order:4}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-ClearButton,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-ClearButton{align-items:center;background:none;border:0;color:#343a40;opacity:.8;cursor:pointer;display:flex;margin:0;width:calc(1.5em + .1rem + 2px)}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-ClearButton:hover,.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-ClearButton:focus,.aa-DetachedFormContainer 
.aa-Form .aa-InputWrapperSuffix .aa-ClearButton:hover,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-ClearButton:focus{color:#343a40;opacity:.8}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-ClearButton[hidden],.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-ClearButton[hidden]{display:none}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-ClearButton svg,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-ClearButton svg{width:calc(1.5em + 0.75rem + calc(1px * 2))}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-CopyButton,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-CopyButton{border:none;align-items:center;background:none;color:#343a40;opacity:.4;font-size:.7rem;cursor:pointer;display:none;margin:0;width:calc(1em + .1rem + 2px)}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-CopyButton:hover,.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-CopyButton:focus,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-CopyButton:hover,.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-CopyButton:focus{color:#343a40;opacity:.8}.aa-Autocomplete .aa-Form .aa-InputWrapperSuffix .aa-CopyButton[hidden],.aa-DetachedFormContainer .aa-Form .aa-InputWrapperSuffix .aa-CopyButton[hidden]{display:none}.aa-PanelLayout:empty{display:none}.quarto-search-no-results.no-query{display:none}.aa-Source:has(.no-query){display:none}#quarto-search-results .aa-Panel{border:solid #dee2e6 1px}#quarto-search-results .aa-SourceNoResults{width:398px}.aa-DetachedOverlay .aa-Panel,#quarto-search-results .aa-Panel{max-height:65vh;overflow-y:auto;font-size:.925rem}.aa-DetachedOverlay .aa-SourceNoResults,#quarto-search-results .aa-SourceNoResults{height:60px;display:flex;justify-content:center;align-items:center}.aa-DetachedOverlay .search-error,#quarto-search-results .search-error{padding-top:10px;padding-left:20px;padding-right:20px;cursor:default}.aa-DetachedOverlay .search-error 
.search-error-title,#quarto-search-results .search-error .search-error-title{font-size:1.1rem;margin-bottom:.5rem}.aa-DetachedOverlay .search-error .search-error-title .search-error-icon,#quarto-search-results .search-error .search-error-title .search-error-icon{margin-right:8px}.aa-DetachedOverlay .search-error .search-error-text,#quarto-search-results .search-error .search-error-text{font-weight:300}.aa-DetachedOverlay .search-result-text,#quarto-search-results .search-result-text{font-weight:300;overflow:hidden;text-overflow:ellipsis;display:-webkit-box;-webkit-line-clamp:2;-webkit-box-orient:vertical;line-height:1.2rem;max-height:2.4rem}.aa-DetachedOverlay .aa-SourceHeader .search-result-header,#quarto-search-results .aa-SourceHeader .search-result-header{font-size:.875rem;background-color:#f2f2f2;padding-left:14px;padding-bottom:4px;padding-top:4px}.aa-DetachedOverlay .aa-SourceHeader .search-result-header-no-results,#quarto-search-results .aa-SourceHeader .search-result-header-no-results{display:none}.aa-DetachedOverlay .aa-SourceFooter .algolia-search-logo,#quarto-search-results .aa-SourceFooter .algolia-search-logo{width:110px;opacity:.85;margin:8px;float:right}.aa-DetachedOverlay .search-result-section,#quarto-search-results .search-result-section{font-size:.925em}.aa-DetachedOverlay a.search-result-link,#quarto-search-results a.search-result-link{color:inherit;text-decoration:none}.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item,#quarto-search-results li.aa-Item[aria-selected=true] .search-item{background-color:#5c2983}.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item.search-result-more,.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item .search-result-section,.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item .search-result-text,.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item .search-result-title-container,.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item 
.search-result-text-container,#quarto-search-results li.aa-Item[aria-selected=true] .search-item.search-result-more,#quarto-search-results li.aa-Item[aria-selected=true] .search-item .search-result-section,#quarto-search-results li.aa-Item[aria-selected=true] .search-item .search-result-text,#quarto-search-results li.aa-Item[aria-selected=true] .search-item .search-result-title-container,#quarto-search-results li.aa-Item[aria-selected=true] .search-item .search-result-text-container{color:#fff;background-color:#5c2983}.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item mark.search-match,.aa-DetachedOverlay li.aa-Item[aria-selected=true] .search-item .search-match.mark,#quarto-search-results li.aa-Item[aria-selected=true] .search-item mark.search-match,#quarto-search-results li.aa-Item[aria-selected=true] .search-item .search-match.mark{color:#fff;background-color:#7233a2}.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item,#quarto-search-results li.aa-Item[aria-selected=false] .search-item{background-color:#fff}.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item.search-result-more,.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item .search-result-section,.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item .search-result-text,.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item .search-result-title-container,.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item .search-result-text-container,#quarto-search-results li.aa-Item[aria-selected=false] .search-item.search-result-more,#quarto-search-results li.aa-Item[aria-selected=false] .search-item .search-result-section,#quarto-search-results li.aa-Item[aria-selected=false] .search-item .search-result-text,#quarto-search-results li.aa-Item[aria-selected=false] .search-item .search-result-title-container,#quarto-search-results li.aa-Item[aria-selected=false] .search-item .search-result-text-container{color:#343a40}.aa-DetachedOverlay 
li.aa-Item[aria-selected=false] .search-item mark.search-match,.aa-DetachedOverlay li.aa-Item[aria-selected=false] .search-item .search-match.mark,#quarto-search-results li.aa-Item[aria-selected=false] .search-item mark.search-match,#quarto-search-results li.aa-Item[aria-selected=false] .search-item .search-match.mark{color:inherit;background-color:#c5a1e1}.aa-DetachedOverlay .aa-Item .search-result-doc:not(.document-selectable) .search-result-title-container,#quarto-search-results .aa-Item .search-result-doc:not(.document-selectable) .search-result-title-container{background-color:#fff;color:#343a40}.aa-DetachedOverlay .aa-Item .search-result-doc:not(.document-selectable) .search-result-text-container,#quarto-search-results .aa-Item .search-result-doc:not(.document-selectable) .search-result-text-container{padding-top:0px}.aa-DetachedOverlay li.aa-Item .search-result-doc.document-selectable .search-result-text-container,#quarto-search-results li.aa-Item .search-result-doc.document-selectable .search-result-text-container{margin-top:-4px}.aa-DetachedOverlay .aa-Item,#quarto-search-results .aa-Item{cursor:pointer}.aa-DetachedOverlay .aa-Item .search-item,#quarto-search-results .aa-Item .search-item{border-left:none;border-right:none;border-top:none;background-color:#fff;border-color:#dee2e6;color:#343a40}.aa-DetachedOverlay .aa-Item .search-item p,#quarto-search-results .aa-Item .search-item p{margin-top:0;margin-bottom:0}.aa-DetachedOverlay .aa-Item .search-item i.bi,#quarto-search-results .aa-Item .search-item i.bi{padding-left:8px;padding-right:8px;font-size:1.3em}.aa-DetachedOverlay .aa-Item .search-item .search-result-title,#quarto-search-results .aa-Item .search-item .search-result-title{margin-top:.3em;margin-bottom:0em}.aa-DetachedOverlay .aa-Item .search-item .search-result-crumbs,#quarto-search-results .aa-Item .search-item .search-result-crumbs{white-space:nowrap;text-overflow:ellipsis;font-size:.8em;font-weight:300;margin-right:1em}.aa-DetachedOverlay 
.aa-Item .search-item .search-result-crumbs:not(.search-result-crumbs-wrap),#quarto-search-results .aa-Item .search-item .search-result-crumbs:not(.search-result-crumbs-wrap){max-width:30%;margin-left:auto;margin-top:.5em;margin-bottom:.1rem}.aa-DetachedOverlay .aa-Item .search-item .search-result-crumbs.search-result-crumbs-wrap,#quarto-search-results .aa-Item .search-item .search-result-crumbs.search-result-crumbs-wrap{flex-basis:100%;margin-top:0em;margin-bottom:.2em;margin-left:37px}.aa-DetachedOverlay .aa-Item .search-result-title-container,#quarto-search-results .aa-Item .search-result-title-container{font-size:1em;display:flex;flex-wrap:wrap;padding:6px 4px 6px 4px}.aa-DetachedOverlay .aa-Item .search-result-text-container,#quarto-search-results .aa-Item .search-result-text-container{padding-bottom:8px;padding-right:8px;margin-left:42px}.aa-DetachedOverlay .aa-Item .search-result-doc-section,.aa-DetachedOverlay .aa-Item .search-result-more,#quarto-search-results .aa-Item .search-result-doc-section,#quarto-search-results .aa-Item .search-result-more{padding-top:8px;padding-bottom:8px;padding-left:44px}.aa-DetachedOverlay .aa-Item .search-result-more,#quarto-search-results .aa-Item .search-result-more{font-size:.8em;font-weight:400}.aa-DetachedOverlay .aa-Item .search-result-doc,#quarto-search-results .aa-Item .search-result-doc{border-top:1px solid #dee2e6}.aa-DetachedSearchButton{background:none;border:none}.aa-DetachedSearchButton .aa-DetachedSearchButtonPlaceholder{display:none}.navbar .aa-DetachedSearchButton .aa-DetachedSearchButtonIcon{color:#e0d6e7}.sidebar-tools-collapse #quarto-search,.sidebar-tools-main #quarto-search{display:inline}.sidebar-tools-collapse #quarto-search .aa-Autocomplete,.sidebar-tools-main #quarto-search .aa-Autocomplete{display:inline}.sidebar-tools-collapse #quarto-search .aa-DetachedSearchButton,.sidebar-tools-main #quarto-search .aa-DetachedSearchButton{padding-left:4px;padding-right:4px}.sidebar-tools-collapse #quarto-search 
.aa-DetachedSearchButton .aa-DetachedSearchButtonIcon,.sidebar-tools-main #quarto-search .aa-DetachedSearchButton .aa-DetachedSearchButtonIcon{color:#595959}.sidebar-tools-collapse #quarto-search .aa-DetachedSearchButton .aa-DetachedSearchButtonIcon .aa-SubmitIcon,.sidebar-tools-main #quarto-search .aa-DetachedSearchButton .aa-DetachedSearchButtonIcon .aa-SubmitIcon{margin-top:-3px}.aa-DetachedContainer{background:rgba(255,255,255,.65);width:90%;bottom:0;box-shadow:rgba(222,226,230,.6) 0 0 0 1px;outline:currentColor none medium;display:flex;flex-direction:column;left:0;margin:0;overflow:hidden;padding:0;position:fixed;right:0;top:0;z-index:1101}.aa-DetachedContainer::after{height:32px}.aa-DetachedContainer .aa-SourceHeader{margin:var(--aa-spacing-half) 0 var(--aa-spacing-half) 2px}.aa-DetachedContainer .aa-Panel{background-color:#fff;border-radius:0;box-shadow:none;flex-grow:1;margin:0;padding:0;position:relative}.aa-DetachedContainer .aa-PanelLayout{bottom:0;box-shadow:none;left:0;margin:0;max-height:none;overflow-y:auto;position:absolute;right:0;top:0;width:100%}.aa-DetachedFormContainer{background-color:#fff;border-bottom:1px solid #dee2e6;display:flex;flex-direction:row;justify-content:space-between;margin:0;padding:.5em}.aa-DetachedCancelButton{background:none;font-size:.8em;border:0;border-radius:3px;color:#343a40;cursor:pointer;margin:0 0 0 .5em;padding:0 .5em}.aa-DetachedCancelButton:hover,.aa-DetachedCancelButton:focus{box-shadow:rgba(92,41,131,.6) 0 0 0 1px;outline:currentColor none medium}.aa-DetachedContainer--modal{bottom:inherit;height:auto;margin:0 auto;position:absolute;top:100px;border-radius:6px;max-width:850px}@media(max-width: 575.98px){.aa-DetachedContainer--modal{width:100%;top:0px;border-radius:0px;border:none}}.aa-DetachedContainer--modal 
.aa-PanelLayout{max-height:var(--aa-detached-modal-max-height);padding-bottom:var(--aa-spacing-half);position:static}.aa-Detached{height:100vh;overflow:hidden}.aa-DetachedOverlay{background-color:rgba(52,58,64,.4);position:fixed;left:0;right:0;top:0;margin:0;padding:0;height:100vh;z-index:1100}.quarto-dashboard.nav-fixed.dashboard-sidebar #quarto-content.quarto-dashboard-content{padding:0em}.quarto-dashboard #quarto-content.quarto-dashboard-content{padding:1em}.quarto-dashboard #quarto-content.quarto-dashboard-content>*{padding-top:0}@media(min-width: 576px){.quarto-dashboard{height:100%}}.quarto-dashboard .card.valuebox.bslib-card.bg-primary{background-color:#5397e9 !important}.quarto-dashboard .card.valuebox.bslib-card.bg-secondary{background-color:#343a40 !important}.quarto-dashboard .card.valuebox.bslib-card.bg-success{background-color:#3aa716 !important}.quarto-dashboard .card.valuebox.bslib-card.bg-info{background-color:rgba(153,84,187,.7019607843) !important}.quarto-dashboard .card.valuebox.bslib-card.bg-warning{background-color:#fa6400 !important}.quarto-dashboard .card.valuebox.bslib-card.bg-danger{background-color:rgba(255,0,57,.7019607843) !important}.quarto-dashboard .card.valuebox.bslib-card.bg-light{background-color:#f8f9fa !important}.quarto-dashboard .card.valuebox.bslib-card.bg-dark{background-color:#343a40 !important}.quarto-dashboard.dashboard-fill{display:flex;flex-direction:column}.quarto-dashboard #quarto-appendix{display:none}.quarto-dashboard #quarto-header #quarto-dashboard-header{border-top:solid 1px #7735aa;border-bottom:solid 1px #7735aa}.quarto-dashboard #quarto-header #quarto-dashboard-header>nav{padding-left:1em;padding-right:1em}.quarto-dashboard #quarto-header #quarto-dashboard-header>nav .navbar-brand-container{padding-left:0}.quarto-dashboard #quarto-header #quarto-dashboard-header .navbar-toggler{margin-right:0}.quarto-dashboard #quarto-header #quarto-dashboard-header 
.navbar-toggler-icon{height:1em;width:1em;background-image:url('data:image/svg+xml,')}.quarto-dashboard #quarto-header #quarto-dashboard-header .navbar-brand-container{padding-right:1em}.quarto-dashboard #quarto-header #quarto-dashboard-header .navbar-title{font-size:1.1em}.quarto-dashboard #quarto-header #quarto-dashboard-header .navbar-nav{font-size:.9em}.quarto-dashboard #quarto-dashboard-header .navbar{padding:0}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-container{padding-left:1em}.quarto-dashboard #quarto-dashboard-header .navbar.slim .navbar-brand-container .nav-link,.quarto-dashboard #quarto-dashboard-header .navbar.slim .navbar-nav .nav-link{padding:.7em}.quarto-dashboard #quarto-dashboard-header .navbar .quarto-color-scheme-toggle{order:9}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-toggler{margin-left:.5em;order:10}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-nav .nav-link{padding:.5em;height:100%;display:flex;align-items:center}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-nav .active{background-color:#7233a2}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-brand-container{padding:.5em .5em .5em 0;display:flex;flex-direction:row;margin-right:2em;align-items:center}@media(max-width: 767.98px){.quarto-dashboard #quarto-dashboard-header .navbar .navbar-brand-container{margin-right:auto}}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-collapse{align-self:stretch}@media(min-width: 768px){.quarto-dashboard #quarto-dashboard-header .navbar .navbar-collapse{order:8}}@media(max-width: 767.98px){.quarto-dashboard #quarto-dashboard-header .navbar .navbar-collapse{order:1000;padding-bottom:.5em}}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-collapse .navbar-nav{align-self:stretch}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-title{font-size:1.25em;line-height:1.1em;display:flex;flex-direction:row;flex-wrap:wrap;align-items:baseline}.quarto-dashboard 
#quarto-dashboard-header .navbar .navbar-title .navbar-title-text{margin-right:.4em}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-title a{text-decoration:none;color:inherit}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-subtitle,.quarto-dashboard #quarto-dashboard-header .navbar .navbar-author{font-size:.9rem;margin-right:.5em}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-author{margin-left:auto}.quarto-dashboard #quarto-dashboard-header .navbar .navbar-logo{max-height:48px;min-height:30px;object-fit:cover;margin-right:1em}.quarto-dashboard #quarto-dashboard-header .navbar .quarto-dashboard-links{order:9;padding-right:1em}.quarto-dashboard #quarto-dashboard-header .navbar .quarto-dashboard-link-text{margin-left:.25em}.quarto-dashboard #quarto-dashboard-header .navbar .quarto-dashboard-link{padding-right:0em;padding-left:.7em;text-decoration:none;color:#e0d6e7}.quarto-dashboard .page-layout-custom .tab-content{padding:0;border:none}.quarto-dashboard-img-contain{height:100%;width:100%;object-fit:contain}@media(max-width: 575.98px){.quarto-dashboard .bslib-grid{grid-template-rows:minmax(1em, max-content) !important}.quarto-dashboard .sidebar-content{height:inherit}.quarto-dashboard .page-layout-custom{min-height:100vh}}.quarto-dashboard.dashboard-toolbar>.page-layout-custom,.quarto-dashboard.dashboard-sidebar>.page-layout-custom{padding:0}.quarto-dashboard .quarto-dashboard-content.quarto-dashboard-pages{padding:0}.quarto-dashboard .callout{margin-bottom:0;margin-top:0}.quarto-dashboard .html-fill-container figure{overflow:hidden}.quarto-dashboard bslib-tooltip .rounded-pill{border:solid #6c757d 1px}.quarto-dashboard bslib-tooltip .rounded-pill .svg{fill:#343a40}.quarto-dashboard .tabset .dashboard-card-no-title .nav-tabs{margin-left:0;margin-right:auto}.quarto-dashboard .tabset .tab-content{border:none}.quarto-dashboard .tabset .card-header .nav-link[role=tab]{margin-top:-6px;padding-top:6px;padding-bottom:6px}.quarto-dashboard 
.card.valuebox,.quarto-dashboard .card.bslib-value-box{min-height:3rem}.quarto-dashboard .card.valuebox .card-body,.quarto-dashboard .card.bslib-value-box .card-body{padding:0}.quarto-dashboard .bslib-value-box .value-box-value{font-size:clamp(.1em,15cqw,5em)}.quarto-dashboard .bslib-value-box .value-box-showcase .bi{font-size:clamp(.1em,max(18cqw,5.2cqh),5em);text-align:center;height:1em}.quarto-dashboard .bslib-value-box .value-box-showcase .bi::before{vertical-align:1em}.quarto-dashboard .bslib-value-box .value-box-area{margin-top:auto;margin-bottom:auto}.quarto-dashboard .card figure.quarto-float{display:flex;flex-direction:column;align-items:center}.quarto-dashboard .dashboard-scrolling{padding:1em}.quarto-dashboard .full-height{height:100%}.quarto-dashboard .showcase-bottom .value-box-grid{display:grid;grid-template-columns:1fr;grid-template-rows:1fr auto;grid-template-areas:"top" "bottom"}.quarto-dashboard .showcase-bottom .value-box-grid .value-box-showcase{grid-area:bottom;padding:0;margin:0}.quarto-dashboard .showcase-bottom .value-box-grid .value-box-showcase i.bi{font-size:4rem}.quarto-dashboard .showcase-bottom .value-box-grid .value-box-area{grid-area:top}.quarto-dashboard .tab-content{margin-bottom:0}.quarto-dashboard .bslib-card .bslib-navs-card-title{justify-content:stretch;align-items:end}.quarto-dashboard .card-header{display:flex;flex-wrap:wrap;justify-content:space-between}.quarto-dashboard .card-header .card-title{display:flex;flex-direction:column;justify-content:center;margin-bottom:0}.quarto-dashboard .tabset .card-toolbar{margin-bottom:1em}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout{border:none;gap:var(--bslib-spacer, 1rem)}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout>.main{padding:0}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout>.sidebar{border-radius:.25rem;border:1px solid rgba(0,0,0,.175)}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout>.collapse-toggle{display:none}@media(max-width: 767.98px){.quarto-dashboard 
.bslib-grid>.bslib-sidebar-layout{grid-template-columns:1fr;grid-template-rows:max-content 1fr}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout>.main{grid-column:1;grid-row:2}.quarto-dashboard .bslib-grid>.bslib-sidebar-layout .sidebar{grid-column:1;grid-row:1}}.quarto-dashboard .sidebar-right .sidebar{padding-left:2.5em}.quarto-dashboard .sidebar-right .collapse-toggle{left:2px}.quarto-dashboard .quarto-dashboard .sidebar-right button.collapse-toggle:not(.transitioning){left:unset}.quarto-dashboard aside.sidebar{padding-left:1em;padding-right:1em;background-color:rgba(52,58,64,.25);color:#343a40}.quarto-dashboard .bslib-sidebar-layout>div.main{padding:.7em}.quarto-dashboard .bslib-sidebar-layout button.collapse-toggle{margin-top:.3em}.quarto-dashboard .bslib-sidebar-layout .collapse-toggle{top:0}.quarto-dashboard .bslib-sidebar-layout.sidebar-collapsed:not(.transitioning):not(.sidebar-right) .collapse-toggle{left:2px}.quarto-dashboard .sidebar>section>.h3:first-of-type{margin-top:0em}.quarto-dashboard .sidebar .h3,.quarto-dashboard .sidebar .h4,.quarto-dashboard .sidebar .h5,.quarto-dashboard .sidebar .h6{margin-top:.5em}.quarto-dashboard .sidebar form{flex-direction:column;align-items:start;margin-bottom:1em}.quarto-dashboard .sidebar form div[class*=oi-][class$=-input]{flex-direction:column}.quarto-dashboard .sidebar form[class*=oi-][class$=-toggle]{flex-direction:row-reverse;align-items:center;justify-content:start}.quarto-dashboard .sidebar form input[type=range]{margin-top:.5em;margin-right:.8em;margin-left:1em}.quarto-dashboard .sidebar label{width:fit-content}.quarto-dashboard .sidebar .card-body{margin-bottom:2em}.quarto-dashboard .sidebar .shiny-input-container{margin-bottom:1em}.quarto-dashboard .sidebar .shiny-options-group{margin-top:0}.quarto-dashboard .sidebar .control-label{margin-bottom:.3em}.quarto-dashboard .card .card-body .quarto-layout-row{align-items:stretch}.quarto-dashboard 
.toolbar{font-size:.9em;display:flex;flex-direction:row;border-top:solid 1px #bcbfc0;padding:1em;flex-wrap:wrap;background-color:rgba(52,58,64,.25)}.quarto-dashboard .toolbar .cell-output-display{display:flex}.quarto-dashboard .toolbar .shiny-input-container{padding-bottom:.5em;margin-bottom:.5em;width:inherit}.quarto-dashboard .toolbar .shiny-input-container>.checkbox:first-child{margin-top:6px}.quarto-dashboard .toolbar>*:last-child{margin-right:0}.quarto-dashboard .toolbar>*>*{margin-right:1em;align-items:baseline}.quarto-dashboard .toolbar>*>*>a{text-decoration:none;margin-top:auto;margin-bottom:auto}.quarto-dashboard .toolbar .shiny-input-container{padding-bottom:0;margin-bottom:0}.quarto-dashboard .toolbar .shiny-input-container>*{flex-shrink:0;flex-grow:0}.quarto-dashboard .toolbar .form-group.shiny-input-container:not([role=group])>label{margin-bottom:0}.quarto-dashboard .toolbar .shiny-input-container.no-baseline{align-items:start;padding-top:6px}.quarto-dashboard .toolbar .shiny-input-container{display:flex;align-items:baseline}.quarto-dashboard .toolbar .shiny-input-container label{padding-right:.4em}.quarto-dashboard .toolbar .shiny-input-container .bslib-input-switch{margin-top:6px}.quarto-dashboard .toolbar input[type=text]{line-height:1;width:inherit}.quarto-dashboard .toolbar .input-daterange{width:inherit}.quarto-dashboard .toolbar .input-daterange input[type=text]{height:2.4em;width:10em}.quarto-dashboard .toolbar .input-daterange .input-group-addon{height:auto;padding:0;margin-left:-5px !important;margin-right:-5px}.quarto-dashboard .toolbar .input-daterange .input-group-addon .input-group-text{padding-top:0;padding-bottom:0;height:100%}.quarto-dashboard .toolbar span.irs.irs--shiny{width:10em}.quarto-dashboard .toolbar span.irs.irs--shiny .irs-line{top:9px}.quarto-dashboard .toolbar span.irs.irs--shiny .irs-min,.quarto-dashboard .toolbar span.irs.irs--shiny .irs-max,.quarto-dashboard .toolbar span.irs.irs--shiny .irs-from,.quarto-dashboard 
.toolbar span.irs.irs--shiny .irs-to,.quarto-dashboard .toolbar span.irs.irs--shiny .irs-single{top:20px}.quarto-dashboard .toolbar span.irs.irs--shiny .irs-bar{top:8px}.quarto-dashboard .toolbar span.irs.irs--shiny .irs-handle{top:0px}.quarto-dashboard .toolbar .shiny-input-checkboxgroup>label{margin-top:6px}.quarto-dashboard .toolbar .shiny-input-checkboxgroup>.shiny-options-group{margin-top:0;align-items:baseline}.quarto-dashboard .toolbar .shiny-input-radiogroup>label{margin-top:6px}.quarto-dashboard .toolbar .shiny-input-radiogroup>.shiny-options-group{align-items:baseline;margin-top:0}.quarto-dashboard .toolbar .shiny-input-radiogroup>.shiny-options-group>.radio{margin-right:.3em}.quarto-dashboard .toolbar .form-select{padding-top:.2em;padding-bottom:.2em}.quarto-dashboard .toolbar .shiny-input-select{min-width:6em}.quarto-dashboard .toolbar div.checkbox{margin-bottom:0px}.quarto-dashboard .toolbar>.checkbox:first-child{margin-top:6px}.quarto-dashboard .toolbar form{width:fit-content}.quarto-dashboard .toolbar form label{padding-top:.2em;padding-bottom:.2em;width:fit-content}.quarto-dashboard .toolbar form input[type=date]{width:fit-content}.quarto-dashboard .toolbar form input[type=color]{width:3em}.quarto-dashboard .toolbar form button{padding:.4em}.quarto-dashboard .toolbar form select{width:fit-content}.quarto-dashboard .toolbar>*{font-size:.9em;flex-grow:0}.quarto-dashboard .toolbar .shiny-input-container label{margin-bottom:1px}.quarto-dashboard .toolbar-bottom{margin-top:1em;margin-bottom:0 !important;order:2}.quarto-dashboard .quarto-dashboard-content>.dashboard-toolbar-container>.toolbar-content>.tab-content>.tab-pane>*:not(.bslib-sidebar-layout){padding:1em}.quarto-dashboard .quarto-dashboard-content>.dashboard-toolbar-container>.toolbar-content>*:not(.tab-content){padding:1em}.quarto-dashboard .quarto-dashboard-content>.tab-content>.dashboard-page>.dashboard-toolbar-container>.toolbar-content,.quarto-dashboard 
.quarto-dashboard-content>.tab-content>.dashboard-page:not(.dashboard-sidebar-container)>*:not(.dashboard-toolbar-container){padding:1em}.quarto-dashboard .toolbar-content{padding:0}.quarto-dashboard .quarto-dashboard-content.quarto-dashboard-pages .tab-pane>.dashboard-toolbar-container .toolbar{border-radius:0;margin-bottom:0}.quarto-dashboard .dashboard-toolbar-container.toolbar-toplevel .toolbar{border-bottom:1px solid rgba(0,0,0,.175)}.quarto-dashboard .dashboard-toolbar-container.toolbar-toplevel .toolbar-bottom{margin-top:0}.quarto-dashboard .dashboard-toolbar-container:not(.toolbar-toplevel) .toolbar{margin-bottom:1em;border-top:none;border-radius:.25rem;border:1px solid rgba(0,0,0,.175)}.quarto-dashboard .vega-embed.has-actions details{width:1.7em;height:2em;position:absolute !important;top:0;right:0}.quarto-dashboard .dashboard-toolbar-container{padding:0}.quarto-dashboard .card .card-header p:last-child,.quarto-dashboard .card .card-footer p:last-child{margin-bottom:0}.quarto-dashboard .card .card-body>.h4:first-child{margin-top:0}.quarto-dashboard .card .card-body{z-index:4}@media(max-width: 767.98px){.quarto-dashboard .card .card-body .itables div.dataTables_wrapper div.dataTables_length,.quarto-dashboard .card .card-body .itables div.dataTables_wrapper div.dataTables_info,.quarto-dashboard .card .card-body .itables div.dataTables_wrapper div.dataTables_paginate{text-align:initial}.quarto-dashboard .card .card-body .itables div.dataTables_wrapper div.dataTables_filter{text-align:right}.quarto-dashboard .card .card-body .itables div.dataTables_wrapper div.dataTables_paginate ul.pagination{justify-content:initial}}.quarto-dashboard .card .card-body .itables .dataTables_wrapper{display:flex;flex-wrap:wrap;justify-content:space-between;align-items:center;padding-top:0}.quarto-dashboard .card .card-body .itables .dataTables_wrapper table{flex-shrink:0}.quarto-dashboard .card .card-body .itables .dataTables_wrapper 
.dt-buttons{margin-bottom:.5em;margin-left:auto;width:fit-content;float:right}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dt-buttons.btn-group{background:#fff;border:none}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dt-buttons .btn-secondary{background-color:#fff;background-image:none;border:solid #dee2e6 1px;padding:.2em .7em}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dt-buttons .btn span{font-size:.8em;color:#343a40}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_info{margin-left:.5em;margin-bottom:.5em;padding-top:0}@media(min-width: 768px){.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_info{font-size:.875em}}@media(max-width: 767.98px){.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_info{font-size:.8em}}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_filter{margin-bottom:.5em;font-size:.875em}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_filter input[type=search]{padding:1px 5px 1px 5px;font-size:.875em}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_length{flex-basis:1 1 50%;margin-bottom:.5em;font-size:.875em}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_length select{padding:.4em 3em .4em .5em;font-size:.875em;margin-left:.2em;margin-right:.2em}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_paginate{flex-shrink:0}@media(min-width: 768px){.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_paginate{margin-left:auto}}.quarto-dashboard .card .card-body .itables .dataTables_wrapper .dataTables_paginate ul.pagination .paginate_button .page-link{font-size:.8em}.quarto-dashboard .card .card-footer{font-size:.9em}.quarto-dashboard .card .card-toolbar{display:flex;flex-grow:1;flex-direction:row;width:100%;flex-wrap:wrap}.quarto-dashboard .card 
.card-toolbar>*{font-size:.8em;flex-grow:0}.quarto-dashboard .card .card-toolbar>.card-title{font-size:1em;flex-grow:1;align-self:flex-start;margin-top:.1em}.quarto-dashboard .card .card-toolbar .cell-output-display{display:flex}.quarto-dashboard .card .card-toolbar .shiny-input-container{padding-bottom:.5em;margin-bottom:.5em;width:inherit}.quarto-dashboard .card .card-toolbar .shiny-input-container>.checkbox:first-child{margin-top:6px}.quarto-dashboard .card .card-toolbar>*:last-child{margin-right:0}.quarto-dashboard .card .card-toolbar>*>*{margin-right:1em;align-items:baseline}.quarto-dashboard .card .card-toolbar>*>*>a{text-decoration:none;margin-top:auto;margin-bottom:auto}.quarto-dashboard .card .card-toolbar form{width:fit-content}.quarto-dashboard .card .card-toolbar form label{padding-top:.2em;padding-bottom:.2em;width:fit-content}.quarto-dashboard .card .card-toolbar form input[type=date]{width:fit-content}.quarto-dashboard .card .card-toolbar form input[type=color]{width:3em}.quarto-dashboard .card .card-toolbar form button{padding:.4em}.quarto-dashboard .card .card-toolbar form select{width:fit-content}.quarto-dashboard .card .card-toolbar .cell-output-display{display:flex}.quarto-dashboard .card .card-toolbar .shiny-input-container{padding-bottom:.5em;margin-bottom:.5em;width:inherit}.quarto-dashboard .card .card-toolbar .shiny-input-container>.checkbox:first-child{margin-top:6px}.quarto-dashboard .card .card-toolbar>*:last-child{margin-right:0}.quarto-dashboard .card .card-toolbar>*>*{margin-right:1em;align-items:baseline}.quarto-dashboard .card .card-toolbar>*>*>a{text-decoration:none;margin-top:auto;margin-bottom:auto}.quarto-dashboard .card .card-toolbar .shiny-input-container{padding-bottom:0;margin-bottom:0}.quarto-dashboard .card .card-toolbar .shiny-input-container>*{flex-shrink:0;flex-grow:0}.quarto-dashboard .card .card-toolbar .form-group.shiny-input-container:not([role=group])>label{margin-bottom:0}.quarto-dashboard .card .card-toolbar 
.shiny-input-container.no-baseline{align-items:start;padding-top:6px}.quarto-dashboard .card .card-toolbar .shiny-input-container{display:flex;align-items:baseline}.quarto-dashboard .card .card-toolbar .shiny-input-container label{padding-right:.4em}.quarto-dashboard .card .card-toolbar .shiny-input-container .bslib-input-switch{margin-top:6px}.quarto-dashboard .card .card-toolbar input[type=text]{line-height:1;width:inherit}.quarto-dashboard .card .card-toolbar .input-daterange{width:inherit}.quarto-dashboard .card .card-toolbar .input-daterange input[type=text]{height:2.4em;width:10em}.quarto-dashboard .card .card-toolbar .input-daterange .input-group-addon{height:auto;padding:0;margin-left:-5px !important;margin-right:-5px}.quarto-dashboard .card .card-toolbar .input-daterange .input-group-addon .input-group-text{padding-top:0;padding-bottom:0;height:100%}.quarto-dashboard .card .card-toolbar span.irs.irs--shiny{width:10em}.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-line{top:9px}.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-min,.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-max,.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-from,.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-to,.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-single{top:20px}.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-bar{top:8px}.quarto-dashboard .card .card-toolbar span.irs.irs--shiny .irs-handle{top:0px}.quarto-dashboard .card .card-toolbar .shiny-input-checkboxgroup>label{margin-top:6px}.quarto-dashboard .card .card-toolbar .shiny-input-checkboxgroup>.shiny-options-group{margin-top:0;align-items:baseline}.quarto-dashboard .card .card-toolbar .shiny-input-radiogroup>label{margin-top:6px}.quarto-dashboard .card .card-toolbar .shiny-input-radiogroup>.shiny-options-group{align-items:baseline;margin-top:0}.quarto-dashboard .card .card-toolbar 
.shiny-input-radiogroup>.shiny-options-group>.radio{margin-right:.3em}.quarto-dashboard .card .card-toolbar .form-select{padding-top:.2em;padding-bottom:.2em}.quarto-dashboard .card .card-toolbar .shiny-input-select{min-width:6em}.quarto-dashboard .card .card-toolbar div.checkbox{margin-bottom:0px}.quarto-dashboard .card .card-toolbar>.checkbox:first-child{margin-top:6px}.quarto-dashboard .card-body>table>thead{border-top:none}.quarto-dashboard .card-body>.table>:not(caption)>*>*{background-color:#fff}.tableFloatingHeaderOriginal{background-color:#fff;position:sticky !important;top:0 !important}.dashboard-data-table{margin-top:-1px}.quarto-listing{padding-bottom:1em}.listing-pagination{padding-top:.5em}ul.pagination{float:right;padding-left:8px;padding-top:.5em}ul.pagination li{padding-right:.75em}ul.pagination li.disabled a,ul.pagination li.active a{color:#fff;text-decoration:none}ul.pagination li:last-of-type{padding-right:0}.listing-actions-group{display:flex}.quarto-listing-filter{margin-bottom:1em;width:200px;margin-left:auto}.quarto-listing-sort{margin-bottom:1em;margin-right:auto;width:auto}.quarto-listing-sort .input-group-text{font-size:.8em}.input-group-text{border-right:none}.quarto-listing-sort select.form-select{font-size:.8em}.listing-no-matching{text-align:center;padding-top:2em;padding-bottom:3em;font-size:1em}#quarto-margin-sidebar .quarto-listing-category{padding-top:0;font-size:1rem}#quarto-margin-sidebar .quarto-listing-category-title{cursor:pointer;font-weight:600;font-size:1rem}.quarto-listing-category .category{cursor:pointer}.quarto-listing-category .category.active{font-weight:600}.quarto-listing-category.category-cloud{display:flex;flex-wrap:wrap;align-items:baseline}.quarto-listing-category.category-cloud .category{padding-right:5px}.quarto-listing-category.category-cloud .category-cloud-1{font-size:.75em}.quarto-listing-category.category-cloud .category-cloud-2{font-size:.95em}.quarto-listing-category.category-cloud 
.category-cloud-3{font-size:1.15em}.quarto-listing-category.category-cloud .category-cloud-4{font-size:1.35em}.quarto-listing-category.category-cloud .category-cloud-5{font-size:1.55em}.quarto-listing-category.category-cloud .category-cloud-6{font-size:1.75em}.quarto-listing-category.category-cloud .category-cloud-7{font-size:1.95em}.quarto-listing-category.category-cloud .category-cloud-8{font-size:2.15em}.quarto-listing-category.category-cloud .category-cloud-9{font-size:2.35em}.quarto-listing-category.category-cloud .category-cloud-10{font-size:2.55em}.quarto-listing-cols-1{grid-template-columns:repeat(1, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-1{grid-template-columns:repeat(1, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-1{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-2{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-2{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-2{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-3{grid-template-columns:repeat(3, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-3{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-3{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-4{grid-template-columns:repeat(4, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-4{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-4{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-5{grid-template-columns:repeat(5, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-5{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-5{grid-template-columns:minmax(0, 
1fr);gap:1.5em}}.quarto-listing-cols-6{grid-template-columns:repeat(6, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-6{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-6{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-7{grid-template-columns:repeat(7, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-7{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-7{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-8{grid-template-columns:repeat(8, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-8{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-8{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-9{grid-template-columns:repeat(9, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-9{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-9{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-10{grid-template-columns:repeat(10, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-10{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-10{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-11{grid-template-columns:repeat(11, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-11{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 575.98px){.quarto-listing-cols-11{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-cols-12{grid-template-columns:repeat(12, minmax(0, 1fr));gap:1.5em}@media(max-width: 767.98px){.quarto-listing-cols-12{grid-template-columns:repeat(2, minmax(0, 1fr));gap:1.5em}}@media(max-width: 
575.98px){.quarto-listing-cols-12{grid-template-columns:minmax(0, 1fr);gap:1.5em}}.quarto-listing-grid{gap:1.5em}.quarto-grid-item.borderless{border:none}.quarto-grid-item.borderless .listing-categories .listing-category:last-of-type,.quarto-grid-item.borderless .listing-categories .listing-category:first-of-type{padding-left:0}.quarto-grid-item.borderless .listing-categories .listing-category{border:0}.quarto-grid-link{text-decoration:none;color:inherit}.quarto-grid-link:hover{text-decoration:none;color:inherit}.quarto-grid-item h5.title,.quarto-grid-item .title.h5{margin-top:0;margin-bottom:0}.quarto-grid-item .card-footer{display:flex;justify-content:space-between;font-size:.8em}.quarto-grid-item .card-footer p{margin-bottom:0}.quarto-grid-item p.card-img-top{margin-bottom:0}.quarto-grid-item p.card-img-top>img{object-fit:cover}.quarto-grid-item .card-other-values{margin-top:.5em;font-size:.8em}.quarto-grid-item .card-other-values tr{margin-bottom:.5em}.quarto-grid-item .card-other-values tr>td:first-of-type{font-weight:600;padding-right:1em;padding-left:1em;vertical-align:top}.quarto-grid-item div.post-contents{display:flex;flex-direction:column;text-decoration:none;height:100%}.quarto-grid-item .listing-item-img-placeholder{background-color:rgba(52,58,64,.25);flex-shrink:0}.quarto-grid-item .card-attribution{padding-top:1em;display:flex;gap:1em;text-transform:uppercase;color:#6c757d;font-weight:500;flex-grow:10;align-items:flex-end}.quarto-grid-item .description{padding-bottom:1em}.quarto-grid-item .card-attribution .date{align-self:flex-end}.quarto-grid-item .card-attribution.justify{justify-content:space-between}.quarto-grid-item .card-attribution.start{justify-content:flex-start}.quarto-grid-item .card-attribution.end{justify-content:flex-end}.quarto-grid-item .card-title{margin-bottom:.1em}.quarto-grid-item .card-subtitle{padding-top:.25em}.quarto-grid-item .card-text{font-size:.9em}.quarto-grid-item 
.listing-reading-time{padding-bottom:.25em}.quarto-grid-item .card-text-small{font-size:.8em}.quarto-grid-item .card-subtitle.subtitle{font-size:.9em;font-weight:600;padding-bottom:.5em}.quarto-grid-item .listing-categories{display:flex;flex-wrap:wrap;padding-bottom:5px}.quarto-grid-item .listing-categories .listing-category{color:#6c757d;border:solid 1px #dee2e6;border-radius:.25rem;text-transform:uppercase;font-size:.65em;padding-left:.5em;padding-right:.5em;padding-top:.15em;padding-bottom:.15em;cursor:pointer;margin-right:4px;margin-bottom:4px}.quarto-grid-item.card-right{text-align:right}.quarto-grid-item.card-right .listing-categories{justify-content:flex-end}.quarto-grid-item.card-left{text-align:left}.quarto-grid-item.card-center{text-align:center}.quarto-grid-item.card-center .listing-description{text-align:justify}.quarto-grid-item.card-center .listing-categories{justify-content:center}table.quarto-listing-table td.image{padding:0px}table.quarto-listing-table td.image img{width:100%;max-width:50px;object-fit:contain}table.quarto-listing-table a{text-decoration:none;word-break:keep-all}table.quarto-listing-table th a{color:inherit}table.quarto-listing-table th a.asc:after{margin-bottom:-2px;margin-left:5px;display:inline-block;height:1rem;width:1rem;background-repeat:no-repeat;background-size:1rem 1rem;background-image:url('data:image/svg+xml,');content:""}table.quarto-listing-table th a.desc:after{margin-bottom:-2px;margin-left:5px;display:inline-block;height:1rem;width:1rem;background-repeat:no-repeat;background-size:1rem 1rem;background-image:url('data:image/svg+xml,');content:""}table.quarto-listing-table.table-hover td{cursor:pointer}.quarto-post.image-left{flex-direction:row}.quarto-post.image-right{flex-direction:row-reverse}@media(max-width: 767.98px){.quarto-post.image-right,.quarto-post.image-left{gap:0em;flex-direction:column}.quarto-post .metadata{padding-bottom:1em;order:2}.quarto-post .body{order:1}.quarto-post 
.thumbnail{order:3}}.list.quarto-listing-default div:last-of-type{border-bottom:none}@media(min-width: 992px){.quarto-listing-container-default{margin-right:2em}}div.quarto-post{display:flex;gap:2em;margin-bottom:1.5em;border-bottom:1px solid #dee2e6}@media(max-width: 767.98px){div.quarto-post{padding-bottom:1em}}div.quarto-post .metadata{flex-basis:20%;flex-grow:0;margin-top:.2em;flex-shrink:10}div.quarto-post .thumbnail{flex-basis:30%;flex-grow:0;flex-shrink:0}div.quarto-post .thumbnail img{margin-top:.4em;width:100%;object-fit:cover}div.quarto-post .body{flex-basis:45%;flex-grow:1;flex-shrink:0}div.quarto-post .body h3.listing-title,div.quarto-post .body .listing-title.h3{margin-top:0px;margin-bottom:0px;border-bottom:none}div.quarto-post .body .listing-subtitle{font-size:.875em;margin-bottom:.5em;margin-top:.2em}div.quarto-post .body .description{font-size:.9em}div.quarto-post .body pre code{white-space:pre-wrap}div.quarto-post a{color:#343a40;text-decoration:none}div.quarto-post .metadata{display:flex;flex-direction:column;font-size:.8em;font-family:"Source Sans Pro",-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol";flex-basis:33%}div.quarto-post .listing-categories{display:flex;flex-wrap:wrap;padding-bottom:5px}div.quarto-post .listing-categories .listing-category{color:#6c757d;border:solid 1px #dee2e6;border-radius:.25rem;text-transform:uppercase;font-size:.65em;padding-left:.5em;padding-right:.5em;padding-top:.15em;padding-bottom:.15em;cursor:pointer;margin-right:4px;margin-bottom:4px}div.quarto-post .listing-description{margin-bottom:.5em}div.quarto-about-jolla{display:flex !important;flex-direction:column;align-items:center;margin-top:10%;padding-bottom:1em}div.quarto-about-jolla .about-image{object-fit:cover;margin-left:auto;margin-right:auto;margin-bottom:1.5em}div.quarto-about-jolla img.round{border-radius:50%}div.quarto-about-jolla 
img.rounded{border-radius:10px}div.quarto-about-jolla .quarto-title h1.title,div.quarto-about-jolla .quarto-title .title.h1{text-align:center}div.quarto-about-jolla .quarto-title .description{text-align:center}div.quarto-about-jolla h2,div.quarto-about-jolla .h2{border-bottom:none}div.quarto-about-jolla .about-sep{width:60%}div.quarto-about-jolla main{text-align:center}div.quarto-about-jolla .about-links{display:flex}@media(min-width: 992px){div.quarto-about-jolla .about-links{flex-direction:row;column-gap:.8em;row-gap:15px;flex-wrap:wrap}}@media(max-width: 991.98px){div.quarto-about-jolla .about-links{flex-direction:column;row-gap:1em;width:100%;padding-bottom:1.5em}}div.quarto-about-jolla .about-link{color:#626d78;text-decoration:none;border:solid 1px}@media(min-width: 992px){div.quarto-about-jolla .about-link{font-size:.8em;padding:.25em .5em;border-radius:4px}}@media(max-width: 991.98px){div.quarto-about-jolla .about-link{font-size:1.1em;padding:.5em .5em;text-align:center;border-radius:6px}}div.quarto-about-jolla .about-link:hover{color:#40ba2f}div.quarto-about-jolla .about-link i.bi{margin-right:.15em}div.quarto-about-solana{display:flex !important;flex-direction:column;padding-top:3em !important;padding-bottom:1em}div.quarto-about-solana .about-entity{display:flex !important;align-items:start;justify-content:space-between}@media(min-width: 992px){div.quarto-about-solana .about-entity{flex-direction:row}}@media(max-width: 991.98px){div.quarto-about-solana .about-entity{flex-direction:column-reverse;align-items:center;text-align:center}}div.quarto-about-solana .about-entity .entity-contents{display:flex;flex-direction:column}@media(max-width: 767.98px){div.quarto-about-solana .about-entity .entity-contents{width:100%}}div.quarto-about-solana .about-entity .about-image{object-fit:cover}@media(max-width: 991.98px){div.quarto-about-solana .about-entity .about-image{margin-bottom:1.5em}}div.quarto-about-solana .about-entity 
img.round{border-radius:50%}div.quarto-about-solana .about-entity img.rounded{border-radius:10px}div.quarto-about-solana .about-entity .about-links{display:flex;justify-content:left;padding-bottom:1.2em}@media(min-width: 992px){div.quarto-about-solana .about-entity .about-links{flex-direction:row;column-gap:.8em;row-gap:15px;flex-wrap:wrap}}@media(max-width: 991.98px){div.quarto-about-solana .about-entity .about-links{flex-direction:column;row-gap:1em;width:100%;padding-bottom:1.5em}}div.quarto-about-solana .about-entity .about-link{color:#626d78;text-decoration:none;border:solid 1px}@media(min-width: 992px){div.quarto-about-solana .about-entity .about-link{font-size:.8em;padding:.25em .5em;border-radius:4px}}@media(max-width: 991.98px){div.quarto-about-solana .about-entity .about-link{font-size:1.1em;padding:.5em .5em;text-align:center;border-radius:6px}}div.quarto-about-solana .about-entity .about-link:hover{color:#40ba2f}div.quarto-about-solana .about-entity .about-link i.bi{margin-right:.15em}div.quarto-about-solana .about-contents{padding-right:1.5em;flex-basis:0;flex-grow:1}div.quarto-about-solana .about-contents main.content{margin-top:0}div.quarto-about-solana .about-contents h2,div.quarto-about-solana .about-contents .h2{border-bottom:none}div.quarto-about-trestles{display:flex !important;flex-direction:row;padding-top:3em !important;padding-bottom:1em}@media(max-width: 991.98px){div.quarto-about-trestles{flex-direction:column;padding-top:0em !important}}div.quarto-about-trestles .about-entity{display:flex !important;flex-direction:column;align-items:center;text-align:center;padding-right:1em}@media(min-width: 992px){div.quarto-about-trestles .about-entity{flex:0 0 42%}}div.quarto-about-trestles .about-entity .about-image{object-fit:cover;margin-bottom:1.5em}div.quarto-about-trestles .about-entity img.round{border-radius:50%}div.quarto-about-trestles .about-entity img.rounded{border-radius:10px}div.quarto-about-trestles .about-entity 
.about-links{display:flex;justify-content:center}@media(min-width: 992px){div.quarto-about-trestles .about-entity .about-links{flex-direction:row;column-gap:.8em;row-gap:15px;flex-wrap:wrap}}@media(max-width: 991.98px){div.quarto-about-trestles .about-entity .about-links{flex-direction:column;row-gap:1em;width:100%;padding-bottom:1.5em}}div.quarto-about-trestles .about-entity .about-link{color:#626d78;text-decoration:none;border:solid 1px}@media(min-width: 992px){div.quarto-about-trestles .about-entity .about-link{font-size:.8em;padding:.25em .5em;border-radius:4px}}@media(max-width: 991.98px){div.quarto-about-trestles .about-entity .about-link{font-size:1.1em;padding:.5em .5em;text-align:center;border-radius:6px}}div.quarto-about-trestles .about-entity .about-link:hover{color:#40ba2f}div.quarto-about-trestles .about-entity .about-link i.bi{margin-right:.15em}div.quarto-about-trestles .about-contents{flex-basis:0;flex-grow:1}div.quarto-about-trestles .about-contents h2,div.quarto-about-trestles .about-contents .h2{border-bottom:none}@media(min-width: 992px){div.quarto-about-trestles .about-contents{border-left:solid 1px #dee2e6;padding-left:1.5em}}div.quarto-about-trestles .about-contents main.content{margin-top:0}div.quarto-about-marquee{padding-bottom:1em}div.quarto-about-marquee .about-contents{display:flex;flex-direction:column}div.quarto-about-marquee .about-image{max-height:550px;margin-bottom:1.5em;object-fit:cover}div.quarto-about-marquee img.round{border-radius:50%}div.quarto-about-marquee img.rounded{border-radius:10px}div.quarto-about-marquee h2,div.quarto-about-marquee .h2{border-bottom:none}div.quarto-about-marquee .about-links{display:flex;justify-content:center;padding-top:1.5em}@media(min-width: 992px){div.quarto-about-marquee .about-links{flex-direction:row;column-gap:.8em;row-gap:15px;flex-wrap:wrap}}@media(max-width: 991.98px){div.quarto-about-marquee 
.about-links{flex-direction:column;row-gap:1em;width:100%;padding-bottom:1.5em}}div.quarto-about-marquee .about-link{color:#626d78;text-decoration:none;border:solid 1px}@media(min-width: 992px){div.quarto-about-marquee .about-link{font-size:.8em;padding:.25em .5em;border-radius:4px}}@media(max-width: 991.98px){div.quarto-about-marquee .about-link{font-size:1.1em;padding:.5em .5em;text-align:center;border-radius:6px}}div.quarto-about-marquee .about-link:hover{color:#40ba2f}div.quarto-about-marquee .about-link i.bi{margin-right:.15em}@media(min-width: 992px){div.quarto-about-marquee .about-link{border:none}}div.quarto-about-broadside{display:flex;flex-direction:column;padding-bottom:1em}div.quarto-about-broadside .about-main{display:flex !important;padding-top:0 !important}@media(min-width: 992px){div.quarto-about-broadside .about-main{flex-direction:row;align-items:flex-start}}@media(max-width: 991.98px){div.quarto-about-broadside .about-main{flex-direction:column}}@media(max-width: 991.98px){div.quarto-about-broadside .about-main .about-entity{flex-shrink:0;width:100%;height:450px;margin-bottom:1.5em;background-size:cover;background-repeat:no-repeat}}@media(min-width: 992px){div.quarto-about-broadside .about-main .about-entity{flex:0 10 50%;margin-right:1.5em;width:100%;height:100%;background-size:100%;background-repeat:no-repeat}}div.quarto-about-broadside .about-main .about-contents{padding-top:14px;flex:0 0 50%}div.quarto-about-broadside h2,div.quarto-about-broadside .h2{border-bottom:none}div.quarto-about-broadside .about-sep{margin-top:1.5em;width:60%;align-self:center}div.quarto-about-broadside .about-links{display:flex;justify-content:center;column-gap:20px;padding-top:1.5em}@media(min-width: 992px){div.quarto-about-broadside .about-links{flex-direction:row;column-gap:.8em;row-gap:15px;flex-wrap:wrap}}@media(max-width: 991.98px){div.quarto-about-broadside 
.about-links{flex-direction:column;row-gap:1em;width:100%;padding-bottom:1.5em}}div.quarto-about-broadside .about-link{color:#626d78;text-decoration:none;border:solid 1px}@media(min-width: 992px){div.quarto-about-broadside .about-link{font-size:.8em;padding:.25em .5em;border-radius:4px}}@media(max-width: 991.98px){div.quarto-about-broadside .about-link{font-size:1.1em;padding:.5em .5em;text-align:center;border-radius:6px}}div.quarto-about-broadside .about-link:hover{color:#40ba2f}div.quarto-about-broadside .about-link i.bi{margin-right:.15em}@media(min-width: 992px){div.quarto-about-broadside .about-link{border:none}}.tippy-box[data-theme~=quarto]{background-color:#fff;border:solid 1px #dee2e6;border-radius:.25rem;color:#343a40;font-size:.875rem}.tippy-box[data-theme~=quarto]>.tippy-backdrop{background-color:#fff}.tippy-box[data-theme~=quarto]>.tippy-arrow:after,.tippy-box[data-theme~=quarto]>.tippy-svg-arrow:after{content:"";position:absolute;z-index:-1}.tippy-box[data-theme~=quarto]>.tippy-arrow:after{border-color:rgba(0,0,0,0);border-style:solid}.tippy-box[data-placement^=top]>.tippy-arrow:before{bottom:-6px}.tippy-box[data-placement^=bottom]>.tippy-arrow:before{top:-6px}.tippy-box[data-placement^=right]>.tippy-arrow:before{left:-6px}.tippy-box[data-placement^=left]>.tippy-arrow:before{right:-6px}.tippy-box[data-theme~=quarto][data-placement^=top]>.tippy-arrow:before{border-top-color:#fff}.tippy-box[data-theme~=quarto][data-placement^=top]>.tippy-arrow:after{border-top-color:#dee2e6;border-width:7px 7px 0;top:17px;left:1px}.tippy-box[data-theme~=quarto][data-placement^=top]>.tippy-svg-arrow>svg{top:16px}.tippy-box[data-theme~=quarto][data-placement^=top]>.tippy-svg-arrow:after{top:17px}.tippy-box[data-theme~=quarto][data-placement^=bottom]>.tippy-arrow:before{border-bottom-color:#fff;bottom:16px}.tippy-box[data-theme~=quarto][data-placement^=bottom]>.tippy-arrow:after{border-bottom-color:#dee2e6;border-width:0 7px 
7px;bottom:17px;left:1px}.tippy-box[data-theme~=quarto][data-placement^=bottom]>.tippy-svg-arrow>svg{bottom:15px}.tippy-box[data-theme~=quarto][data-placement^=bottom]>.tippy-svg-arrow:after{bottom:17px}.tippy-box[data-theme~=quarto][data-placement^=left]>.tippy-arrow:before{border-left-color:#fff}.tippy-box[data-theme~=quarto][data-placement^=left]>.tippy-arrow:after{border-left-color:#dee2e6;border-width:7px 0 7px 7px;left:17px;top:1px}.tippy-box[data-theme~=quarto][data-placement^=left]>.tippy-svg-arrow>svg{left:11px}.tippy-box[data-theme~=quarto][data-placement^=left]>.tippy-svg-arrow:after{left:12px}.tippy-box[data-theme~=quarto][data-placement^=right]>.tippy-arrow:before{border-right-color:#fff;right:16px}.tippy-box[data-theme~=quarto][data-placement^=right]>.tippy-arrow:after{border-width:7px 7px 7px 0;right:17px;top:1px;border-right-color:#dee2e6}.tippy-box[data-theme~=quarto][data-placement^=right]>.tippy-svg-arrow>svg{right:11px}.tippy-box[data-theme~=quarto][data-placement^=right]>.tippy-svg-arrow:after{right:12px}.tippy-box[data-theme~=quarto]>.tippy-svg-arrow{fill:#343a40}.tippy-box[data-theme~=quarto]>.tippy-svg-arrow:after{background-image:url(data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iNiIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBkPSJNMCA2czEuNzk2LS4wMTMgNC42Ny0zLjYxNUM1Ljg1MS45IDYuOTMuMDA2IDggMGMxLjA3LS4wMDYgMi4xNDguODg3IDMuMzQzIDIuMzg1QzE0LjIzMyA2LjAwNSAxNiA2IDE2IDZIMHoiIGZpbGw9InJnYmEoMCwgOCwgMTYsIDAuMikiLz48L3N2Zz4=);background-size:16px 6px;width:16px;height:6px}.top-right{position:absolute;top:1em;right:1em}.visually-hidden{border:0;clip:rect(0 0 0 0);height:auto;margin:0;overflow:hidden;padding:0;position:absolute;width:1px;white-space:nowrap}.hidden{display:none !important}.zindex-bottom{z-index:-1 
!important}figure.figure{display:block}.quarto-layout-panel{margin-bottom:1em}.quarto-layout-panel>figure{width:100%}.quarto-layout-panel>figure>figcaption,.quarto-layout-panel>.panel-caption{margin-top:10pt}.quarto-layout-panel>.table-caption{margin-top:0px}.table-caption p{margin-bottom:.5em}.quarto-layout-row{display:flex;flex-direction:row;align-items:flex-start}.quarto-layout-valign-top{align-items:flex-start}.quarto-layout-valign-bottom{align-items:flex-end}.quarto-layout-valign-center{align-items:center}.quarto-layout-cell{position:relative;margin-right:20px}.quarto-layout-cell:last-child{margin-right:0}.quarto-layout-cell figure,.quarto-layout-cell>p{margin:.2em}.quarto-layout-cell img{max-width:100%}.quarto-layout-cell .html-widget{width:100% !important}.quarto-layout-cell div figure p{margin:0}.quarto-layout-cell figure{display:block;margin-inline-start:0;margin-inline-end:0}.quarto-layout-cell table{display:inline-table}.quarto-layout-cell-subref figcaption,figure .quarto-layout-row figure figcaption{text-align:center;font-style:italic}.quarto-figure{position:relative;margin-bottom:1em}.quarto-figure>figure{width:100%;margin-bottom:0}.quarto-figure-left>figure>p,.quarto-figure-left>figure>div{text-align:left}.quarto-figure-center>figure>p,.quarto-figure-center>figure>div{text-align:center}.quarto-figure-right>figure>p,.quarto-figure-right>figure>div{text-align:right}.quarto-figure>figure>div.cell-annotation,.quarto-figure>figure>div 
code{text-align:left}figure>p:empty{display:none}figure>p:first-child{margin-top:0;margin-bottom:0}figure>figcaption.quarto-float-caption-bottom{margin-bottom:.5em}figure>figcaption.quarto-float-caption-top{margin-top:.5em}div[id^=tbl-]{position:relative}.quarto-figure>.anchorjs-link{position:absolute;top:.6em;right:.5em}div[id^=tbl-]>.anchorjs-link{position:absolute;top:.7em;right:.3em}.quarto-figure:hover>.anchorjs-link,div[id^=tbl-]:hover>.anchorjs-link,h2:hover>.anchorjs-link,.h2:hover>.anchorjs-link,h3:hover>.anchorjs-link,.h3:hover>.anchorjs-link,h4:hover>.anchorjs-link,.h4:hover>.anchorjs-link,h5:hover>.anchorjs-link,.h5:hover>.anchorjs-link,h6:hover>.anchorjs-link,.h6:hover>.anchorjs-link,.reveal-anchorjs-link>.anchorjs-link{opacity:1}#title-block-header{margin-block-end:1rem;position:relative;margin-top:-1px}#title-block-header .abstract{margin-block-start:1rem}#title-block-header .abstract .abstract-title{font-weight:600}#title-block-header a{text-decoration:none}#title-block-header .author,#title-block-header .date,#title-block-header .doi{margin-block-end:.2rem}#title-block-header .quarto-title-block>div{display:flex}#title-block-header .quarto-title-block>div>h1,#title-block-header .quarto-title-block>div>.h1{flex-grow:1}#title-block-header .quarto-title-block>div>button{flex-shrink:0;height:2.25rem;margin-top:0}@media(min-width: 992px){#title-block-header .quarto-title-block>div>button{margin-top:5px}}tr.header>th>p:last-of-type{margin-bottom:0px}table,table.table{margin-top:.5rem;margin-bottom:.5rem}caption,.table-caption{padding-top:.5rem;padding-bottom:.5rem;text-align:center}figure.quarto-float-tbl figcaption.quarto-float-caption-top{margin-top:.5rem;margin-bottom:.25rem;text-align:center}figure.quarto-float-tbl 
figcaption.quarto-float-caption-bottom{padding-top:.25rem;margin-bottom:.5rem;text-align:center}.utterances{max-width:none;margin-left:-8px}iframe{margin-bottom:1em}details{margin-bottom:1em}details[show]{margin-bottom:0}details>summary{color:#6c757d}details>summary>p:only-child{display:inline}pre.sourceCode,code.sourceCode{position:relative}p code:not(.sourceCode){white-space:pre-wrap}code{white-space:pre}@media print{code{white-space:pre-wrap}}pre>code{display:block}pre>code.sourceCode{white-space:pre}pre>code.sourceCode>span>a:first-child::before{text-decoration:none}pre.code-overflow-wrap>code.sourceCode{white-space:pre-wrap}pre.code-overflow-scroll>code.sourceCode{white-space:pre}code a:any-link{color:inherit;text-decoration:none}code a:hover{color:inherit;text-decoration:underline}ul.task-list{padding-left:1em}[data-tippy-root]{display:inline-block}.tippy-content .footnote-back{display:none}.footnote-back{margin-left:.2em}.tippy-content{overflow-x:auto}.quarto-embedded-source-code{display:none}.quarto-unresolved-ref{font-weight:600}.quarto-cover-image{max-width:35%;float:right;margin-left:30px}.cell-output-display .widget-subarea{margin-bottom:1em}.cell-output-display:not(.no-overflow-x),.knitsql-table:not(.no-overflow-x){overflow-x:auto}.panel-input{margin-bottom:1em}.panel-input>div,.panel-input>div>div{display:inline-block;vertical-align:top;padding-right:12px}.panel-input>p:last-child{margin-bottom:0}.layout-sidebar{margin-bottom:1em}.layout-sidebar .tab-content{border:none}.tab-content>.page-columns.active{display:grid}div.sourceCode>iframe{width:100%;height:300px;margin-bottom:-0.5em}a{text-underline-offset:3px}div.ansi-escaped-output{font-family:monospace;display:block}/*! 
+* +* ansi colors from IPython notebook's +* +* we also add `bright-[color]-` synonyms for the `-[color]-intense` classes since +* that seems to be what ansi_up emits +* +*/.ansi-black-fg{color:#3e424d}.ansi-black-bg{background-color:#3e424d}.ansi-black-intense-black,.ansi-bright-black-fg{color:#282c36}.ansi-black-intense-black,.ansi-bright-black-bg{background-color:#282c36}.ansi-red-fg{color:#e75c58}.ansi-red-bg{background-color:#e75c58}.ansi-red-intense-red,.ansi-bright-red-fg{color:#b22b31}.ansi-red-intense-red,.ansi-bright-red-bg{background-color:#b22b31}.ansi-green-fg{color:#00a250}.ansi-green-bg{background-color:#00a250}.ansi-green-intense-green,.ansi-bright-green-fg{color:#007427}.ansi-green-intense-green,.ansi-bright-green-bg{background-color:#007427}.ansi-yellow-fg{color:#ddb62b}.ansi-yellow-bg{background-color:#ddb62b}.ansi-yellow-intense-yellow,.ansi-bright-yellow-fg{color:#b27d12}.ansi-yellow-intense-yellow,.ansi-bright-yellow-bg{background-color:#b27d12}.ansi-blue-fg{color:#208ffb}.ansi-blue-bg{background-color:#208ffb}.ansi-blue-intense-blue,.ansi-bright-blue-fg{color:#0065ca}.ansi-blue-intense-blue,.ansi-bright-blue-bg{background-color:#0065ca}.ansi-magenta-fg{color:#d160c4}.ansi-magenta-bg{background-color:#d160c4}.ansi-magenta-intense-magenta,.ansi-bright-magenta-fg{color:#a03196}.ansi-magenta-intense-magenta,.ansi-bright-magenta-bg{background-color:#a03196}.ansi-cyan-fg{color:#60c6c8}.ansi-cyan-bg{background-color:#60c6c8}.ansi-cyan-intense-cyan,.ansi-bright-cyan-fg{color:#258f8f}.ansi-cyan-intense-cyan,.ansi-bright-cyan-bg{background-color:#258f8f}.ansi-white-fg{color:#c5c1b4}.ansi-white-bg{background-color:#c5c1b4}.ansi-white-intense-white,.ansi-bright-white-fg{color:#a1a6b2}.ansi-white-intense-white,.ansi-bright-white-bg{background-color:#a1a6b2}.ansi-default-inverse-fg{color:#fff}.ansi-default-inverse-bg{background-color:#000}.ansi-bold{font-weight:bold}.ansi-underline{text-decoration:underline}:root{--quarto-body-bg: #fff;--quarto-body-color: 
#343a40;--quarto-text-muted: #6c757d;--quarto-border-color: #dee2e6;--quarto-border-width: 1px;--quarto-border-radius: 0.25rem}table.gt_table{color:var(--quarto-body-color);font-size:1em;width:100%;background-color:rgba(0,0,0,0);border-top-width:inherit;border-bottom-width:inherit;border-color:var(--quarto-border-color)}table.gt_table th.gt_column_spanner_outer{color:var(--quarto-body-color);background-color:rgba(0,0,0,0);border-top-width:inherit;border-bottom-width:inherit;border-color:var(--quarto-border-color)}table.gt_table th.gt_col_heading{color:var(--quarto-body-color);font-weight:bold;background-color:rgba(0,0,0,0)}table.gt_table thead.gt_col_headings{border-bottom:1px solid currentColor;border-top-width:inherit;border-top-color:var(--quarto-border-color)}table.gt_table thead.gt_col_headings:not(:first-child){border-top-width:1px;border-top-color:var(--quarto-border-color)}table.gt_table td.gt_row{border-bottom-width:1px;border-bottom-color:var(--quarto-border-color);border-top-width:0px}table.gt_table tbody.gt_table_body{border-top-width:1px;border-bottom-width:1px;border-bottom-color:var(--quarto-border-color);border-top-color:currentColor}div.columns{display:initial;gap:initial}div.column{display:inline-block;overflow-x:initial;vertical-align:top;width:50%}.code-annotation-tip-content{word-wrap:break-word}.code-annotation-container-hidden{display:none !important}dl.code-annotation-container-grid{display:grid;grid-template-columns:min-content auto}dl.code-annotation-container-grid dt{grid-column:1}dl.code-annotation-container-grid dd{grid-column:2}pre.sourceCode.code-annotation-code{padding-right:0}code.sourceCode .code-annotation-anchor{z-index:100;position:relative;float:right;background-color:rgba(0,0,0,0)}input[type=checkbox]{margin-right:.5ch}:root{--mermaid-bg-color: #fff;--mermaid-edge-color: #343a40;--mermaid-node-fg-color: #343a40;--mermaid-fg-color: #343a40;--mermaid-fg-color--lighter: #4b545c;--mermaid-fg-color--lightest: 
#626d78;--mermaid-font-family: Source Sans Pro, -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Helvetica Neue, Arial, sans-serif, Apple Color Emoji, Segoe UI Emoji, Segoe UI Symbol;--mermaid-label-bg-color: #fff;--mermaid-label-fg-color: #5c2983;--mermaid-node-bg-color: rgba(92, 41, 131, 0.1);--mermaid-node-fg-color: #343a40}@media print{:root{font-size:11pt}#quarto-sidebar,#TOC,.nav-page{display:none}.page-columns .content{grid-column-start:page-start}.fixed-top{position:relative}.panel-caption,.figure-caption,figcaption{color:#666}}.code-copy-button{position:absolute;top:0;right:0;border:0;margin-top:5px;margin-right:5px;background-color:rgba(0,0,0,0);z-index:3}.code-copy-button:focus{outline:none}.code-copy-button-tooltip{font-size:.75em}pre.sourceCode:hover>.code-copy-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 1rem}pre.sourceCode:hover>.code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}pre.sourceCode:hover>.code-copy-button-checked:hover>.bi::before{background-image:url('data:image/svg+xml,')}main ol ol,main ul ul,main ol ul,main ul ol{margin-bottom:1em}ul>li:not(:has(>p))>ul,ol>li:not(:has(>p))>ul,ul>li:not(:has(>p))>ol,ol>li:not(:has(>p))>ol{margin-bottom:0}ul>li:not(:has(>p))>ul>li:has(>p),ol>li:not(:has(>p))>ul>li:has(>p),ul>li:not(:has(>p))>ol>li:has(>p),ol>li:not(:has(>p))>ol>li:has(>p){margin-top:1rem}body{margin:0}main.page-columns>header>h1.title,main.page-columns>header>.title.h1{margin-bottom:0}@media(min-width: 992px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) 
[body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] 35px [page-end-inset page-end] 5fr [screen-end-inset] 1.5em}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset] 35px [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(850px - 3em)) [body-content-end] 3em [body-end] 50px [body-end-outset] minmax(0px, 250px) [page-end-inset] minmax(50px, 100px) [page-end] 1fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left 
.page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 175px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(1000px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(50px, 100px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(1000px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 
1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start] minmax(50px, 100px) [page-start-inset] 50px [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(500px, calc(1000px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(0px, 200px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 50px [page-start-inset] minmax(50px, 150px) [body-start-outset] 50px [body-start] 1.5em [body-content-start] minmax(450px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(50px, 150px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] minmax(25px, 50px) [page-start-inset] minmax(50px, 150px) [body-start-outset] minmax(25px, 50px) [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] minmax(25px, 50px) [body-end-outset] minmax(50px, 150px) [page-end-inset] minmax(25px, 50px) [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 991.98px){body .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em 
[screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.fullcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.slimcontent:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.listing:not(.floating):not(.docked) .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset] 5fr [body-start] 1.5em [body-content-start] minmax(500px, calc(1250px - 3em)) [body-content-end body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left 
.page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start] 35px [page-start-inset] minmax(0px, 145px) [body-start-outset] 35px [body-start] 1.5em [body-content-start] minmax(450px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1.5em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(1000px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(800px - 3em)) [body-content-end] 1.5em [body-end body-end-outset page-end-inset page-end] 4fr [screen-end-inset] 1.5em [screen-end]}body.docked.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset 
body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.docked.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(25px, 50px) [page-end-inset] 50px [page-end] 5fr [screen-end-inset] 1.5em [screen-end]}body.floating.slimcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 35px [body-end-outset] minmax(75px, 145px) [page-end-inset] 35px [page-end] 4fr [screen-end-inset] 1.5em [screen-end]}body.floating.listing .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset] 5fr [page-start page-start-inset body-start-outset body-start] 1em [body-content-start] minmax(500px, calc(750px - 3em)) [body-content-end] 1.5em [body-end] 50px [body-end-outset] minmax(75px, 150px) [page-end-inset] 25px [page-end] 4fr [screen-end-inset] 1.5em [screen-end]}}@media(max-width: 767.98px){body .page-columns,body.fullcontent:not(.floating):not(.docked) .page-columns,body.slimcontent:not(.floating):not(.docked) .page-columns,body.docked .page-columns,body.docked.slimcontent .page-columns,body.docked.fullcontent .page-columns,body.floating .page-columns,body.floating.slimcontent .page-columns,body.floating.fullcontent .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end 
body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}body:not(.floating):not(.docked) .page-columns.toc-left .page-columns{display:grid;gap:0;grid-template-columns:[screen-start] 1.5em [screen-start-inset page-start page-start-inset body-start-outset body-start body-content-start] minmax(0px, 1fr) [body-content-end body-end body-end-outset page-end-inset page-end screen-end-inset] 1.5em [screen-end]}nav[role=doc-toc]{display:none}}body,.page-row-navigation{grid-template-rows:[page-top] max-content [contents-top] max-content [contents-bottom] max-content [page-bottom]}.page-rows-contents{grid-template-rows:[content-top] minmax(max-content, 1fr) [content-bottom] minmax(60px, max-content) [page-bottom]}.page-full{grid-column:screen-start/screen-end !important}.page-columns>*{grid-column:body-content-start/body-content-end}.page-columns.column-page>*{grid-column:page-start/page-end}.page-columns.column-page-left .page-columns.page-full>*,.page-columns.column-page-left>*{grid-column:page-start/body-content-end}.page-columns.column-page-right .page-columns.page-full>*,.page-columns.column-page-right>*{grid-column:body-content-start/page-end}.page-rows{grid-auto-rows:auto}.header{grid-column:screen-start/screen-end;grid-row:page-top/contents-top}#quarto-content{padding:0;grid-column:screen-start/screen-end;grid-row:contents-top/contents-bottom}body.floating .sidebar.sidebar-navigation{grid-column:page-start/body-start;grid-row:content-top/page-bottom}body.docked 
.sidebar.sidebar-navigation{grid-column:screen-start/body-start;grid-row:content-top/page-bottom}.sidebar.toc-left{grid-column:page-start/body-start;grid-row:content-top/page-bottom}.sidebar.margin-sidebar{grid-column:body-end/page-end;grid-row:content-top/page-bottom}.page-columns .content{grid-column:body-content-start/body-content-end;grid-row:content-top/content-bottom;align-content:flex-start}.page-columns .page-navigation{grid-column:body-content-start/body-content-end;grid-row:content-bottom/page-bottom}.page-columns .footer{grid-column:screen-start/screen-end;grid-row:contents-bottom/page-bottom}.page-columns .column-body{grid-column:body-content-start/body-content-end}.page-columns .column-body-fullbleed{grid-column:body-start/body-end}.page-columns .column-body-outset{grid-column:body-start-outset/body-end-outset;z-index:998;opacity:.999}.page-columns .column-body-outset table{background:#fff}.page-columns .column-body-outset-left{grid-column:body-start-outset/body-content-end;z-index:998;opacity:.999}.page-columns .column-body-outset-left table{background:#fff}.page-columns .column-body-outset-right{grid-column:body-content-start/body-end-outset;z-index:998;opacity:.999}.page-columns .column-body-outset-right table{background:#fff}.page-columns .column-page{grid-column:page-start/page-end;z-index:998;opacity:.999}.page-columns .column-page table{background:#fff}.page-columns .column-page-inset{grid-column:page-start-inset/page-end-inset;z-index:998;opacity:.999}.page-columns .column-page-inset table{background:#fff}.page-columns .column-page-inset-left{grid-column:page-start-inset/body-content-end;z-index:998;opacity:.999}.page-columns .column-page-inset-left table{background:#fff}.page-columns .column-page-inset-right{grid-column:body-content-start/page-end-inset;z-index:998;opacity:.999}.page-columns .column-page-inset-right figcaption table{background:#fff}.page-columns 
.column-page-left{grid-column:page-start/body-content-end;z-index:998;opacity:.999}.page-columns .column-page-left table{background:#fff}.page-columns .column-page-right{grid-column:body-content-start/page-end;z-index:998;opacity:.999}.page-columns .column-page-right figcaption table{background:#fff}#quarto-content.page-columns #quarto-margin-sidebar,#quarto-content.page-columns #quarto-sidebar{z-index:1}@media(max-width: 991.98px){#quarto-content.page-columns #quarto-margin-sidebar.collapse,#quarto-content.page-columns #quarto-sidebar.collapse,#quarto-content.page-columns #quarto-margin-sidebar.collapsing,#quarto-content.page-columns #quarto-sidebar.collapsing{z-index:1055}}#quarto-content.page-columns main.column-page,#quarto-content.page-columns main.column-page-right,#quarto-content.page-columns main.column-page-left{z-index:0}.page-columns .column-screen-inset{grid-column:screen-start-inset/screen-end-inset;z-index:998;opacity:.999}.page-columns .column-screen-inset table{background:#fff}.page-columns .column-screen-inset-left{grid-column:screen-start-inset/body-content-end;z-index:998;opacity:.999}.page-columns .column-screen-inset-left table{background:#fff}.page-columns .column-screen-inset-right{grid-column:body-content-start/screen-end-inset;z-index:998;opacity:.999}.page-columns .column-screen-inset-right table{background:#fff}.page-columns .column-screen{grid-column:screen-start/screen-end;z-index:998;opacity:.999}.page-columns .column-screen table{background:#fff}.page-columns .column-screen-left{grid-column:screen-start/body-content-end;z-index:998;opacity:.999}.page-columns .column-screen-left table{background:#fff}.page-columns .column-screen-right{grid-column:body-content-start/screen-end;z-index:998;opacity:.999}.page-columns .column-screen-right table{background:#fff}.page-columns 
.column-screen-inset-shaded{grid-column:screen-start/screen-end;padding:1em;background:#f8f9fa;z-index:998;opacity:.999;margin-bottom:1em}.zindex-content{z-index:998;opacity:.999}.zindex-modal{z-index:1055;opacity:.999}.zindex-over-content{z-index:999;opacity:.999}img.img-fluid.column-screen,img.img-fluid.column-screen-inset-shaded,img.img-fluid.column-screen-inset,img.img-fluid.column-screen-inset-left,img.img-fluid.column-screen-inset-right,img.img-fluid.column-screen-left,img.img-fluid.column-screen-right{width:100%}@media(min-width: 992px){.margin-caption,div.aside,aside:not(.footnotes):not(.sidebar),.column-margin{grid-column:body-end/page-end !important;z-index:998}.column-sidebar{grid-column:page-start/body-start !important;z-index:998}.column-leftmargin{grid-column:screen-start-inset/body-start !important;z-index:998}.no-row-height{height:1em;overflow:visible}}@media(max-width: 991.98px){.margin-caption,div.aside,aside:not(.footnotes):not(.sidebar),.column-margin{grid-column:body-end/page-end !important;z-index:998}.no-row-height{height:1em;overflow:visible}.page-columns.page-full{overflow:visible}.page-columns.toc-left .margin-caption,.page-columns.toc-left div.aside,.page-columns.toc-left aside:not(.footnotes):not(.sidebar),.page-columns.toc-left .column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;opacity:.999}.page-columns.toc-left .no-row-height{height:initial;overflow:initial}}@media(max-width: 767.98px){.margin-caption,div.aside,aside:not(.footnotes):not(.sidebar),.column-margin{grid-column:body-content-start/body-content-end !important;z-index:998;opacity:.999}.no-row-height{height:initial;overflow:initial}#quarto-margin-sidebar{display:none}#quarto-sidebar-toc-left{display:none}.hidden-sm{display:none}}.panel-grid{display:grid;grid-template-rows:repeat(1, 1fr);grid-template-columns:repeat(24, 1fr);gap:1em}.panel-grid .g-col-1{grid-column:auto/span 1}.panel-grid .g-col-2{grid-column:auto/span 2}.panel-grid 
.g-col-3{grid-column:auto/span 3}.panel-grid .g-col-4{grid-column:auto/span 4}.panel-grid .g-col-5{grid-column:auto/span 5}.panel-grid .g-col-6{grid-column:auto/span 6}.panel-grid .g-col-7{grid-column:auto/span 7}.panel-grid .g-col-8{grid-column:auto/span 8}.panel-grid .g-col-9{grid-column:auto/span 9}.panel-grid .g-col-10{grid-column:auto/span 10}.panel-grid .g-col-11{grid-column:auto/span 11}.panel-grid .g-col-12{grid-column:auto/span 12}.panel-grid .g-col-13{grid-column:auto/span 13}.panel-grid .g-col-14{grid-column:auto/span 14}.panel-grid .g-col-15{grid-column:auto/span 15}.panel-grid .g-col-16{grid-column:auto/span 16}.panel-grid .g-col-17{grid-column:auto/span 17}.panel-grid .g-col-18{grid-column:auto/span 18}.panel-grid .g-col-19{grid-column:auto/span 19}.panel-grid .g-col-20{grid-column:auto/span 20}.panel-grid .g-col-21{grid-column:auto/span 21}.panel-grid .g-col-22{grid-column:auto/span 22}.panel-grid .g-col-23{grid-column:auto/span 23}.panel-grid .g-col-24{grid-column:auto/span 24}.panel-grid .g-start-1{grid-column-start:1}.panel-grid .g-start-2{grid-column-start:2}.panel-grid .g-start-3{grid-column-start:3}.panel-grid .g-start-4{grid-column-start:4}.panel-grid .g-start-5{grid-column-start:5}.panel-grid .g-start-6{grid-column-start:6}.panel-grid .g-start-7{grid-column-start:7}.panel-grid .g-start-8{grid-column-start:8}.panel-grid .g-start-9{grid-column-start:9}.panel-grid .g-start-10{grid-column-start:10}.panel-grid .g-start-11{grid-column-start:11}.panel-grid .g-start-12{grid-column-start:12}.panel-grid .g-start-13{grid-column-start:13}.panel-grid .g-start-14{grid-column-start:14}.panel-grid .g-start-15{grid-column-start:15}.panel-grid .g-start-16{grid-column-start:16}.panel-grid .g-start-17{grid-column-start:17}.panel-grid .g-start-18{grid-column-start:18}.panel-grid .g-start-19{grid-column-start:19}.panel-grid .g-start-20{grid-column-start:20}.panel-grid .g-start-21{grid-column-start:21}.panel-grid .g-start-22{grid-column-start:22}.panel-grid 
.g-start-23{grid-column-start:23}@media(min-width: 576px){.panel-grid .g-col-sm-1{grid-column:auto/span 1}.panel-grid .g-col-sm-2{grid-column:auto/span 2}.panel-grid .g-col-sm-3{grid-column:auto/span 3}.panel-grid .g-col-sm-4{grid-column:auto/span 4}.panel-grid .g-col-sm-5{grid-column:auto/span 5}.panel-grid .g-col-sm-6{grid-column:auto/span 6}.panel-grid .g-col-sm-7{grid-column:auto/span 7}.panel-grid .g-col-sm-8{grid-column:auto/span 8}.panel-grid .g-col-sm-9{grid-column:auto/span 9}.panel-grid .g-col-sm-10{grid-column:auto/span 10}.panel-grid .g-col-sm-11{grid-column:auto/span 11}.panel-grid .g-col-sm-12{grid-column:auto/span 12}.panel-grid .g-col-sm-13{grid-column:auto/span 13}.panel-grid .g-col-sm-14{grid-column:auto/span 14}.panel-grid .g-col-sm-15{grid-column:auto/span 15}.panel-grid .g-col-sm-16{grid-column:auto/span 16}.panel-grid .g-col-sm-17{grid-column:auto/span 17}.panel-grid .g-col-sm-18{grid-column:auto/span 18}.panel-grid .g-col-sm-19{grid-column:auto/span 19}.panel-grid .g-col-sm-20{grid-column:auto/span 20}.panel-grid .g-col-sm-21{grid-column:auto/span 21}.panel-grid .g-col-sm-22{grid-column:auto/span 22}.panel-grid .g-col-sm-23{grid-column:auto/span 23}.panel-grid .g-col-sm-24{grid-column:auto/span 24}.panel-grid .g-start-sm-1{grid-column-start:1}.panel-grid .g-start-sm-2{grid-column-start:2}.panel-grid .g-start-sm-3{grid-column-start:3}.panel-grid .g-start-sm-4{grid-column-start:4}.panel-grid .g-start-sm-5{grid-column-start:5}.panel-grid .g-start-sm-6{grid-column-start:6}.panel-grid .g-start-sm-7{grid-column-start:7}.panel-grid .g-start-sm-8{grid-column-start:8}.panel-grid .g-start-sm-9{grid-column-start:9}.panel-grid .g-start-sm-10{grid-column-start:10}.panel-grid .g-start-sm-11{grid-column-start:11}.panel-grid .g-start-sm-12{grid-column-start:12}.panel-grid .g-start-sm-13{grid-column-start:13}.panel-grid .g-start-sm-14{grid-column-start:14}.panel-grid .g-start-sm-15{grid-column-start:15}.panel-grid 
.g-start-sm-16{grid-column-start:16}.panel-grid .g-start-sm-17{grid-column-start:17}.panel-grid .g-start-sm-18{grid-column-start:18}.panel-grid .g-start-sm-19{grid-column-start:19}.panel-grid .g-start-sm-20{grid-column-start:20}.panel-grid .g-start-sm-21{grid-column-start:21}.panel-grid .g-start-sm-22{grid-column-start:22}.panel-grid .g-start-sm-23{grid-column-start:23}}@media(min-width: 768px){.panel-grid .g-col-md-1{grid-column:auto/span 1}.panel-grid .g-col-md-2{grid-column:auto/span 2}.panel-grid .g-col-md-3{grid-column:auto/span 3}.panel-grid .g-col-md-4{grid-column:auto/span 4}.panel-grid .g-col-md-5{grid-column:auto/span 5}.panel-grid .g-col-md-6{grid-column:auto/span 6}.panel-grid .g-col-md-7{grid-column:auto/span 7}.panel-grid .g-col-md-8{grid-column:auto/span 8}.panel-grid .g-col-md-9{grid-column:auto/span 9}.panel-grid .g-col-md-10{grid-column:auto/span 10}.panel-grid .g-col-md-11{grid-column:auto/span 11}.panel-grid .g-col-md-12{grid-column:auto/span 12}.panel-grid .g-col-md-13{grid-column:auto/span 13}.panel-grid .g-col-md-14{grid-column:auto/span 14}.panel-grid .g-col-md-15{grid-column:auto/span 15}.panel-grid .g-col-md-16{grid-column:auto/span 16}.panel-grid .g-col-md-17{grid-column:auto/span 17}.panel-grid .g-col-md-18{grid-column:auto/span 18}.panel-grid .g-col-md-19{grid-column:auto/span 19}.panel-grid .g-col-md-20{grid-column:auto/span 20}.panel-grid .g-col-md-21{grid-column:auto/span 21}.panel-grid .g-col-md-22{grid-column:auto/span 22}.panel-grid .g-col-md-23{grid-column:auto/span 23}.panel-grid .g-col-md-24{grid-column:auto/span 24}.panel-grid .g-start-md-1{grid-column-start:1}.panel-grid .g-start-md-2{grid-column-start:2}.panel-grid .g-start-md-3{grid-column-start:3}.panel-grid .g-start-md-4{grid-column-start:4}.panel-grid .g-start-md-5{grid-column-start:5}.panel-grid .g-start-md-6{grid-column-start:6}.panel-grid .g-start-md-7{grid-column-start:7}.panel-grid .g-start-md-8{grid-column-start:8}.panel-grid 
.g-start-md-9{grid-column-start:9}.panel-grid .g-start-md-10{grid-column-start:10}.panel-grid .g-start-md-11{grid-column-start:11}.panel-grid .g-start-md-12{grid-column-start:12}.panel-grid .g-start-md-13{grid-column-start:13}.panel-grid .g-start-md-14{grid-column-start:14}.panel-grid .g-start-md-15{grid-column-start:15}.panel-grid .g-start-md-16{grid-column-start:16}.panel-grid .g-start-md-17{grid-column-start:17}.panel-grid .g-start-md-18{grid-column-start:18}.panel-grid .g-start-md-19{grid-column-start:19}.panel-grid .g-start-md-20{grid-column-start:20}.panel-grid .g-start-md-21{grid-column-start:21}.panel-grid .g-start-md-22{grid-column-start:22}.panel-grid .g-start-md-23{grid-column-start:23}}@media(min-width: 992px){.panel-grid .g-col-lg-1{grid-column:auto/span 1}.panel-grid .g-col-lg-2{grid-column:auto/span 2}.panel-grid .g-col-lg-3{grid-column:auto/span 3}.panel-grid .g-col-lg-4{grid-column:auto/span 4}.panel-grid .g-col-lg-5{grid-column:auto/span 5}.panel-grid .g-col-lg-6{grid-column:auto/span 6}.panel-grid .g-col-lg-7{grid-column:auto/span 7}.panel-grid .g-col-lg-8{grid-column:auto/span 8}.panel-grid .g-col-lg-9{grid-column:auto/span 9}.panel-grid .g-col-lg-10{grid-column:auto/span 10}.panel-grid .g-col-lg-11{grid-column:auto/span 11}.panel-grid .g-col-lg-12{grid-column:auto/span 12}.panel-grid .g-col-lg-13{grid-column:auto/span 13}.panel-grid .g-col-lg-14{grid-column:auto/span 14}.panel-grid .g-col-lg-15{grid-column:auto/span 15}.panel-grid .g-col-lg-16{grid-column:auto/span 16}.panel-grid .g-col-lg-17{grid-column:auto/span 17}.panel-grid .g-col-lg-18{grid-column:auto/span 18}.panel-grid .g-col-lg-19{grid-column:auto/span 19}.panel-grid .g-col-lg-20{grid-column:auto/span 20}.panel-grid .g-col-lg-21{grid-column:auto/span 21}.panel-grid .g-col-lg-22{grid-column:auto/span 22}.panel-grid .g-col-lg-23{grid-column:auto/span 23}.panel-grid .g-col-lg-24{grid-column:auto/span 24}.panel-grid .g-start-lg-1{grid-column-start:1}.panel-grid 
.g-start-lg-2{grid-column-start:2}.panel-grid .g-start-lg-3{grid-column-start:3}.panel-grid .g-start-lg-4{grid-column-start:4}.panel-grid .g-start-lg-5{grid-column-start:5}.panel-grid .g-start-lg-6{grid-column-start:6}.panel-grid .g-start-lg-7{grid-column-start:7}.panel-grid .g-start-lg-8{grid-column-start:8}.panel-grid .g-start-lg-9{grid-column-start:9}.panel-grid .g-start-lg-10{grid-column-start:10}.panel-grid .g-start-lg-11{grid-column-start:11}.panel-grid .g-start-lg-12{grid-column-start:12}.panel-grid .g-start-lg-13{grid-column-start:13}.panel-grid .g-start-lg-14{grid-column-start:14}.panel-grid .g-start-lg-15{grid-column-start:15}.panel-grid .g-start-lg-16{grid-column-start:16}.panel-grid .g-start-lg-17{grid-column-start:17}.panel-grid .g-start-lg-18{grid-column-start:18}.panel-grid .g-start-lg-19{grid-column-start:19}.panel-grid .g-start-lg-20{grid-column-start:20}.panel-grid .g-start-lg-21{grid-column-start:21}.panel-grid .g-start-lg-22{grid-column-start:22}.panel-grid .g-start-lg-23{grid-column-start:23}}@media(min-width: 1200px){.panel-grid .g-col-xl-1{grid-column:auto/span 1}.panel-grid .g-col-xl-2{grid-column:auto/span 2}.panel-grid .g-col-xl-3{grid-column:auto/span 3}.panel-grid .g-col-xl-4{grid-column:auto/span 4}.panel-grid .g-col-xl-5{grid-column:auto/span 5}.panel-grid .g-col-xl-6{grid-column:auto/span 6}.panel-grid .g-col-xl-7{grid-column:auto/span 7}.panel-grid .g-col-xl-8{grid-column:auto/span 8}.panel-grid .g-col-xl-9{grid-column:auto/span 9}.panel-grid .g-col-xl-10{grid-column:auto/span 10}.panel-grid .g-col-xl-11{grid-column:auto/span 11}.panel-grid .g-col-xl-12{grid-column:auto/span 12}.panel-grid .g-col-xl-13{grid-column:auto/span 13}.panel-grid .g-col-xl-14{grid-column:auto/span 14}.panel-grid .g-col-xl-15{grid-column:auto/span 15}.panel-grid .g-col-xl-16{grid-column:auto/span 16}.panel-grid .g-col-xl-17{grid-column:auto/span 17}.panel-grid .g-col-xl-18{grid-column:auto/span 18}.panel-grid .g-col-xl-19{grid-column:auto/span 19}.panel-grid 
.g-col-xl-20{grid-column:auto/span 20}.panel-grid .g-col-xl-21{grid-column:auto/span 21}.panel-grid .g-col-xl-22{grid-column:auto/span 22}.panel-grid .g-col-xl-23{grid-column:auto/span 23}.panel-grid .g-col-xl-24{grid-column:auto/span 24}.panel-grid .g-start-xl-1{grid-column-start:1}.panel-grid .g-start-xl-2{grid-column-start:2}.panel-grid .g-start-xl-3{grid-column-start:3}.panel-grid .g-start-xl-4{grid-column-start:4}.panel-grid .g-start-xl-5{grid-column-start:5}.panel-grid .g-start-xl-6{grid-column-start:6}.panel-grid .g-start-xl-7{grid-column-start:7}.panel-grid .g-start-xl-8{grid-column-start:8}.panel-grid .g-start-xl-9{grid-column-start:9}.panel-grid .g-start-xl-10{grid-column-start:10}.panel-grid .g-start-xl-11{grid-column-start:11}.panel-grid .g-start-xl-12{grid-column-start:12}.panel-grid .g-start-xl-13{grid-column-start:13}.panel-grid .g-start-xl-14{grid-column-start:14}.panel-grid .g-start-xl-15{grid-column-start:15}.panel-grid .g-start-xl-16{grid-column-start:16}.panel-grid .g-start-xl-17{grid-column-start:17}.panel-grid .g-start-xl-18{grid-column-start:18}.panel-grid .g-start-xl-19{grid-column-start:19}.panel-grid .g-start-xl-20{grid-column-start:20}.panel-grid .g-start-xl-21{grid-column-start:21}.panel-grid .g-start-xl-22{grid-column-start:22}.panel-grid .g-start-xl-23{grid-column-start:23}}@media(min-width: 1400px){.panel-grid .g-col-xxl-1{grid-column:auto/span 1}.panel-grid .g-col-xxl-2{grid-column:auto/span 2}.panel-grid .g-col-xxl-3{grid-column:auto/span 3}.panel-grid .g-col-xxl-4{grid-column:auto/span 4}.panel-grid .g-col-xxl-5{grid-column:auto/span 5}.panel-grid .g-col-xxl-6{grid-column:auto/span 6}.panel-grid .g-col-xxl-7{grid-column:auto/span 7}.panel-grid .g-col-xxl-8{grid-column:auto/span 8}.panel-grid .g-col-xxl-9{grid-column:auto/span 9}.panel-grid .g-col-xxl-10{grid-column:auto/span 10}.panel-grid .g-col-xxl-11{grid-column:auto/span 11}.panel-grid .g-col-xxl-12{grid-column:auto/span 12}.panel-grid .g-col-xxl-13{grid-column:auto/span 
13}.panel-grid .g-col-xxl-14{grid-column:auto/span 14}.panel-grid .g-col-xxl-15{grid-column:auto/span 15}.panel-grid .g-col-xxl-16{grid-column:auto/span 16}.panel-grid .g-col-xxl-17{grid-column:auto/span 17}.panel-grid .g-col-xxl-18{grid-column:auto/span 18}.panel-grid .g-col-xxl-19{grid-column:auto/span 19}.panel-grid .g-col-xxl-20{grid-column:auto/span 20}.panel-grid .g-col-xxl-21{grid-column:auto/span 21}.panel-grid .g-col-xxl-22{grid-column:auto/span 22}.panel-grid .g-col-xxl-23{grid-column:auto/span 23}.panel-grid .g-col-xxl-24{grid-column:auto/span 24}.panel-grid .g-start-xxl-1{grid-column-start:1}.panel-grid .g-start-xxl-2{grid-column-start:2}.panel-grid .g-start-xxl-3{grid-column-start:3}.panel-grid .g-start-xxl-4{grid-column-start:4}.panel-grid .g-start-xxl-5{grid-column-start:5}.panel-grid .g-start-xxl-6{grid-column-start:6}.panel-grid .g-start-xxl-7{grid-column-start:7}.panel-grid .g-start-xxl-8{grid-column-start:8}.panel-grid .g-start-xxl-9{grid-column-start:9}.panel-grid .g-start-xxl-10{grid-column-start:10}.panel-grid .g-start-xxl-11{grid-column-start:11}.panel-grid .g-start-xxl-12{grid-column-start:12}.panel-grid .g-start-xxl-13{grid-column-start:13}.panel-grid .g-start-xxl-14{grid-column-start:14}.panel-grid .g-start-xxl-15{grid-column-start:15}.panel-grid .g-start-xxl-16{grid-column-start:16}.panel-grid .g-start-xxl-17{grid-column-start:17}.panel-grid .g-start-xxl-18{grid-column-start:18}.panel-grid .g-start-xxl-19{grid-column-start:19}.panel-grid .g-start-xxl-20{grid-column-start:20}.panel-grid .g-start-xxl-21{grid-column-start:21}.panel-grid .g-start-xxl-22{grid-column-start:22}.panel-grid .g-start-xxl-23{grid-column-start:23}}main{margin-top:1em;margin-bottom:1em}h1,.h1,h2,.h2{color:inherit;margin-top:2rem;margin-bottom:1rem;font-weight:600}h1.title,.title.h1{margin-top:0}main.content>section:first-of-type>h2:first-child,main.content>section:first-of-type>.h2:first-child{margin-top:0}h2,.h2{border-bottom:1px solid 
#dee2e6;padding-bottom:.5rem}h3,.h3{font-weight:600}h3,.h3,h4,.h4{opacity:.9;margin-top:1.5rem}h5,.h5,h6,.h6{opacity:.9}.header-section-number{color:#6d7a86}.nav-link.active .header-section-number{color:inherit}mark,.mark{padding:0em}.panel-caption,.figure-caption,.subfigure-caption,.table-caption,figcaption,caption{font-size:.9rem;color:#6d7a86}.quarto-layout-cell[data-ref-parent] caption{color:#6d7a86}.column-margin figcaption,.margin-caption,div.aside,aside,.column-margin{color:#6d7a86;font-size:.825rem}.panel-caption.margin-caption{text-align:inherit}.column-margin.column-container p{margin-bottom:0}.column-margin.column-container>*:not(.collapse):first-child{padding-bottom:.5em;display:block}.column-margin.column-container>*:not(.collapse):not(:first-child){padding-top:.5em;padding-bottom:.5em;display:block}.column-margin.column-container>*.collapse:not(.show){display:none}@media(min-width: 768px){.column-margin.column-container .callout-margin-content:first-child{margin-top:4.5em}.column-margin.column-container .callout-margin-content-simple:first-child{margin-top:3.5em}}.margin-caption>*{padding-top:.5em;padding-bottom:.5em}@media(max-width: 767.98px){.quarto-layout-row{flex-direction:column}}.nav-tabs .nav-item{margin-top:1px;cursor:pointer}.tab-content{margin-top:0px;border-left:#dee2e6 1px solid;border-right:#dee2e6 1px solid;border-bottom:#dee2e6 1px solid;margin-left:0;padding:1em;margin-bottom:1em}@media(max-width: 767.98px){.layout-sidebar{margin-left:0;margin-right:0}}.panel-sidebar,.panel-sidebar .form-control,.panel-input,.panel-input .form-control,.selectize-dropdown{font-size:.9rem}.panel-sidebar .form-control,.panel-input .form-control{padding-top:.1rem}.tab-pane div.sourceCode{margin-top:0px}.tab-pane>p{padding-top:0}.tab-pane>p:nth-child(1){padding-top:0}.tab-pane>p:last-child{margin-bottom:0}.tab-pane>pre:last-child{margin-bottom:0}.tab-content>.tab-pane:not(.active){display:none !important}div.sourceCode{background-color:#eff6ef;border:1px 
solid #eff6ef;border-radius:.25rem}pre.sourceCode{background-color:rgba(0,0,0,0)}pre.sourceCode{border:none;font-size:.875em;overflow:visible !important;padding:.4em}.callout pre.sourceCode{padding-left:0}div.sourceCode{overflow-y:hidden}.callout div.sourceCode{margin-left:initial}.blockquote{font-size:inherit;padding-left:1rem;padding-right:1.5rem;color:#6d7a86}.blockquote h1:first-child,.blockquote .h1:first-child,.blockquote h2:first-child,.blockquote .h2:first-child,.blockquote h3:first-child,.blockquote .h3:first-child,.blockquote h4:first-child,.blockquote .h4:first-child,.blockquote h5:first-child,.blockquote .h5:first-child{margin-top:0}pre{background-color:initial;padding:initial;border:initial}p pre code:not(.sourceCode),li pre code:not(.sourceCode),pre code:not(.sourceCode){background-color:initial}p code:not(.sourceCode),li code:not(.sourceCode),td code:not(.sourceCode){background-color:#f8f9fa;padding:.2em}nav p code:not(.sourceCode),nav li code:not(.sourceCode),nav td code:not(.sourceCode){background-color:rgba(0,0,0,0);padding:0}td code:not(.sourceCode){white-space:pre-wrap}#quarto-embedded-source-code-modal>.modal-dialog{max-width:1000px;padding-left:1.75rem;padding-right:1.75rem}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body{padding:0}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-body div.sourceCode{margin:0;padding:.2rem .2rem;border-radius:0px;border:none}#quarto-embedded-source-code-modal>.modal-dialog>.modal-content>.modal-header{padding:.7rem}.code-tools-button{font-size:1rem;padding:.15rem .15rem;margin-left:5px;color:#6c757d;background-color:rgba(0,0,0,0);transition:initial;cursor:pointer}.code-tools-button>.bi::before{display:inline-block;height:1rem;width:1rem;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:1rem 
1rem}.code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button>.bi::before{background-image:url('data:image/svg+xml,')}#quarto-embedded-source-code-modal .code-copy-button-checked>.bi::before{background-image:url('data:image/svg+xml,')}.sidebar{will-change:top;transition:top 200ms linear;position:sticky;overflow-y:auto;padding-top:1.2em;max-height:100vh}.sidebar.toc-left,.sidebar.margin-sidebar{top:0px;padding-top:1em}.sidebar.quarto-banner-title-block-sidebar>*{padding-top:1.65em}figure .quarto-notebook-link{margin-top:.5em}.quarto-notebook-link{font-size:.75em;color:#6c757d;margin-bottom:1em;text-decoration:none;display:block}.quarto-notebook-link:hover{text-decoration:underline;color:#40ba2f}.quarto-notebook-link::before{display:inline-block;height:.75rem;width:.75rem;margin-bottom:0em;margin-right:.25em;content:"";vertical-align:-0.125em;background-image:url('data:image/svg+xml,');background-repeat:no-repeat;background-size:.75rem .75rem}.toc-actions i.bi,.quarto-code-links i.bi,.quarto-other-links i.bi,.quarto-alternate-notebooks i.bi,.quarto-alternate-formats i.bi{margin-right:.4em;font-size:.8rem}.quarto-other-links-text-target .quarto-code-links i.bi,.quarto-other-links-text-target .quarto-other-links i.bi{margin-right:.2em}.quarto-other-formats-text-target .quarto-alternate-formats i.bi{margin-right:.1em}.toc-actions i.bi.empty,.quarto-code-links i.bi.empty,.quarto-other-links i.bi.empty,.quarto-alternate-notebooks i.bi.empty,.quarto-alternate-formats i.bi.empty{padding-left:1em}.quarto-notebook h2,.quarto-notebook .h2{border-bottom:none}.quarto-notebook .cell-container{display:flex}.quarto-notebook .cell-container .cell{flex-grow:4}.quarto-notebook .cell-container .cell-decorator{padding-top:1.5em;padding-right:1em;text-align:right}.quarto-notebook .cell-container.code-fold .cell-decorator{padding-top:3em}.quarto-notebook .cell-code code{white-space:pre-wrap}.quarto-notebook .cell 
.cell-output-stderr pre code,.quarto-notebook .cell .cell-output-stdout pre code{white-space:pre-wrap;overflow-wrap:anywhere}.toc-actions,.quarto-alternate-formats,.quarto-other-links,.quarto-code-links,.quarto-alternate-notebooks{padding-left:0em}.sidebar .toc-actions a,.sidebar .quarto-alternate-formats a,.sidebar .quarto-other-links a,.sidebar .quarto-code-links a,.sidebar .quarto-alternate-notebooks a,.sidebar nav[role=doc-toc] a{text-decoration:none}.sidebar .toc-actions a:hover,.sidebar .quarto-other-links a:hover,.sidebar .quarto-code-links a:hover,.sidebar .quarto-alternate-formats a:hover,.sidebar .quarto-alternate-notebooks a:hover{color:#40ba2f}.sidebar .toc-actions h2,.sidebar .toc-actions .h2,.sidebar .quarto-code-links h2,.sidebar .quarto-code-links .h2,.sidebar .quarto-other-links h2,.sidebar .quarto-other-links .h2,.sidebar .quarto-alternate-notebooks h2,.sidebar .quarto-alternate-notebooks .h2,.sidebar .quarto-alternate-formats h2,.sidebar .quarto-alternate-formats .h2,.sidebar nav[role=doc-toc]>h2,.sidebar nav[role=doc-toc]>.h2{font-weight:500;margin-bottom:.2rem;margin-top:.3rem;font-family:inherit;border-bottom:0;padding-bottom:0;padding-top:0px}.sidebar .toc-actions>h2,.sidebar .toc-actions>.h2,.sidebar .quarto-code-links>h2,.sidebar .quarto-code-links>.h2,.sidebar .quarto-other-links>h2,.sidebar .quarto-other-links>.h2,.sidebar .quarto-alternate-notebooks>h2,.sidebar .quarto-alternate-notebooks>.h2,.sidebar .quarto-alternate-formats>h2,.sidebar .quarto-alternate-formats>.h2{font-size:.8rem}.sidebar nav[role=doc-toc]>h2,.sidebar nav[role=doc-toc]>.h2{font-size:.875rem}.sidebar nav[role=doc-toc]>ul a{border-left:1px solid #e9ecef;padding-left:.6rem}.sidebar .toc-actions h2>ul a,.sidebar .toc-actions .h2>ul a,.sidebar .quarto-code-links h2>ul a,.sidebar .quarto-code-links .h2>ul a,.sidebar .quarto-other-links h2>ul a,.sidebar .quarto-other-links .h2>ul a,.sidebar .quarto-alternate-notebooks h2>ul a,.sidebar .quarto-alternate-notebooks .h2>ul 
a,.sidebar .quarto-alternate-formats h2>ul a,.sidebar .quarto-alternate-formats .h2>ul a{border-left:none;padding-left:.6rem}.sidebar .toc-actions ul a:empty,.sidebar .quarto-code-links ul a:empty,.sidebar .quarto-other-links ul a:empty,.sidebar .quarto-alternate-notebooks ul a:empty,.sidebar .quarto-alternate-formats ul a:empty,.sidebar nav[role=doc-toc]>ul a:empty{display:none}.sidebar .toc-actions ul,.sidebar .quarto-code-links ul,.sidebar .quarto-other-links ul,.sidebar .quarto-alternate-notebooks ul,.sidebar .quarto-alternate-formats ul{padding-left:0;list-style:none}.sidebar nav[role=doc-toc] ul{list-style:none;padding-left:0;list-style:none}.sidebar nav[role=doc-toc]>ul{margin-left:.45em}.quarto-margin-sidebar nav[role=doc-toc]{padding-left:.5em}.sidebar .toc-actions>ul,.sidebar .quarto-code-links>ul,.sidebar .quarto-other-links>ul,.sidebar .quarto-alternate-notebooks>ul,.sidebar .quarto-alternate-formats>ul{font-size:.8rem}.sidebar nav[role=doc-toc]>ul{font-size:.875rem}.sidebar .toc-actions ul li a,.sidebar .quarto-code-links ul li a,.sidebar .quarto-other-links ul li a,.sidebar .quarto-alternate-notebooks ul li a,.sidebar .quarto-alternate-formats ul li a,.sidebar nav[role=doc-toc]>ul li a{line-height:1.1rem;padding-bottom:.2rem;padding-top:.2rem;color:inherit}.sidebar nav[role=doc-toc] ul>li>ul>li>a{padding-left:1.2em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>a{padding-left:2.4em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>a{padding-left:3.6em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:4.8em}.sidebar nav[role=doc-toc] ul>li>ul>li>ul>li>ul>li>ul>li>ul>li>a{padding-left:6em}.sidebar nav[role=doc-toc] ul>li>a.active,.sidebar nav[role=doc-toc] ul>li>ul>li>a.active{border-left:1px solid #40ba2f;color:#40ba2f !important}.sidebar nav[role=doc-toc] ul>li>a:hover,.sidebar nav[role=doc-toc] ul>li>ul>li>a:hover{color:#40ba2f !important}kbd,.kbd{color:#343a40;background-color:#f8f9fa;border:1px 
solid;border-radius:5px;border-color:#dee2e6}.quarto-appendix-contents div.hanging-indent{margin-left:0em}.quarto-appendix-contents div.hanging-indent div.csl-entry{margin-left:1em;text-indent:-1em}.citation a,.footnote-ref{text-decoration:none}.footnotes ol{padding-left:1em}.tippy-content>*{margin-bottom:.7em}.tippy-content>*:last-child{margin-bottom:0}.callout{margin-top:1.25rem;margin-bottom:1.25rem;border-radius:.25rem;overflow-wrap:break-word}.callout .callout-title-container{overflow-wrap:anywhere}.callout.callout-style-simple{padding:.4em .7em;border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout.callout-style-default{border-left:5px solid;border-right:1px solid #dee2e6;border-top:1px solid #dee2e6;border-bottom:1px solid #dee2e6}.callout .callout-body-container{flex-grow:1}.callout.callout-style-simple .callout-body{font-size:.9rem;font-weight:400}.callout.callout-style-default .callout-body{font-size:.9rem;font-weight:400}.callout:not(.no-icon).callout-titled.callout-style-simple .callout-body{padding-left:1.6em}.callout.callout-titled>.callout-header{padding-top:.2em;margin-bottom:-0.2em}.callout.callout-style-simple>div.callout-header{border-bottom:none;font-size:.9rem;font-weight:600;opacity:75%}.callout.callout-style-default>div.callout-header{border-bottom:none;font-weight:600;opacity:85%;font-size:.9rem;padding-left:.5em;padding-right:.5em}.callout.callout-style-default .callout-body{padding-left:.5em;padding-right:.5em}.callout.callout-style-default .callout-body>:first-child{padding-top:.5rem;margin-top:0}.callout>div.callout-header[data-bs-toggle=collapse]{cursor:pointer}.callout.callout-style-default .callout-header[aria-expanded=false],.callout.callout-style-default .callout-header[aria-expanded=true]{padding-top:0px;margin-bottom:0px;align-items:center}.callout.callout-titled .callout-body>:last-child:not(.sourceCode),.callout.callout-titled 
.callout-body>div>:last-child:not(.sourceCode){padding-bottom:.5rem;margin-bottom:0}.callout:not(.callout-titled) .callout-body>:first-child,.callout:not(.callout-titled) .callout-body>div>:first-child{margin-top:.25rem}.callout:not(.callout-titled) .callout-body>:last-child,.callout:not(.callout-titled) .callout-body>div>:last-child{margin-bottom:.2rem}.callout.callout-style-simple .callout-icon::before,.callout.callout-style-simple .callout-toggle::before{height:1rem;width:1rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.callout.callout-style-default .callout-icon::before,.callout.callout-style-default .callout-toggle::before{height:.9rem;width:.9rem;display:inline-block;content:"";background-repeat:no-repeat;background-size:.9rem .9rem}.callout.callout-style-default .callout-toggle::before{margin-top:5px}.callout .callout-btn-toggle .callout-toggle::before{transition:transform .2s linear}.callout .callout-header[aria-expanded=false] .callout-toggle::before{transform:rotate(-90deg)}.callout .callout-header[aria-expanded=true] .callout-toggle::before{transform:none}.callout.callout-style-simple:not(.no-icon) div.callout-icon-container{padding-top:.2em;padding-right:.55em}.callout.callout-style-default:not(.no-icon) div.callout-icon-container{padding-top:.1em;padding-right:.35em}.callout.callout-style-default:not(.no-icon) div.callout-title-container{margin-top:-1px}.callout.callout-style-default.callout-caution:not(.no-icon) div.callout-icon-container{padding-top:.3em;padding-right:.35em}.callout>.callout-body>.callout-icon-container>.no-icon,.callout>.callout-header>.callout-icon-container>.no-icon{display:none}div.callout.callout{border-left-color:#6c757d}div.callout.callout-style-default>.callout-header{background-color:#6c757d}div.callout-note.callout{border-left-color:#2780e3}div.callout-note.callout-style-default>.callout-header{background-color:#e9f2fc}div.callout-note:not(.callout-titled) 
.callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note.callout-titled .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-note .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-tip.callout{border-left-color:#3fb618}div.callout-tip.callout-style-default>.callout-header{background-color:#ecf8e8}div.callout-tip:not(.callout-titled) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip.callout-titled .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-tip .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-warning.callout{border-left-color:#ff7518}div.callout-warning.callout-style-default>.callout-header{background-color:#fff1e8}div.callout-warning:not(.callout-titled) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning.callout-titled .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-warning .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-caution.callout{border-left-color:#f0ad4e}div.callout-caution.callout-style-default>.callout-header{background-color:#fef7ed}div.callout-caution:not(.callout-titled) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution.callout-titled .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-caution .callout-toggle::before{background-image:url('data:image/svg+xml,')}div.callout-important.callout{border-left-color:#ff0039}div.callout-important.callout-style-default>.callout-header{background-color:#ffe6eb}div.callout-important:not(.callout-titled) .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important.callout-titled .callout-icon::before{background-image:url('data:image/svg+xml,');}div.callout-important 
.callout-toggle::before{background-image:url('data:image/svg+xml,')}.quarto-toggle-container{display:flex;align-items:center}.quarto-reader-toggle .bi::before,.quarto-color-scheme-toggle .bi::before{display:inline-block;height:1rem;width:1rem;content:"";background-repeat:no-repeat;background-size:1rem 1rem}.sidebar-navigation{padding-left:20px}.navbar{background-color:#5c2983;color:#e0d6e7}.navbar .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.navbar .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle:not(.alternate) .bi::before{background-image:url('data:image/svg+xml,')}.sidebar-navigation .quarto-color-scheme-toggle.alternate .bi::before{background-image:url('data:image/svg+xml,')}.quarto-sidebar-toggle{border-color:#dee2e6;border-bottom-left-radius:.25rem;border-bottom-right-radius:.25rem;border-style:solid;border-width:1px;overflow:hidden;border-top-width:0px;padding-top:0px !important}.quarto-sidebar-toggle-title{cursor:pointer;padding-bottom:2px;margin-left:.25em;text-align:center;font-weight:400;font-size:.775em}#quarto-content .quarto-sidebar-toggle{background:#fafafa}#quarto-content .quarto-sidebar-toggle-title{color:#343a40}.quarto-sidebar-toggle-icon{color:#dee2e6;margin-right:.5em;float:right;transition:transform .2s ease}.quarto-sidebar-toggle-icon::before{padding-top:5px}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-icon{transform:rotate(-180deg)}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-title{border-bottom:solid #dee2e6 1px}.quarto-sidebar-toggle-contents{background-color:#fff;padding-right:10px;padding-left:10px;margin-top:0px !important;transition:max-height .5s ease}.quarto-sidebar-toggle.expanded .quarto-sidebar-toggle-contents{padding-top:1em;padding-bottom:10px}@media(max-width: 767.98px){.sidebar-menu-container{padding-bottom:5em}}.quarto-sidebar-toggle:not(.expanded) 
.quarto-sidebar-toggle-contents{padding-top:0px !important;padding-bottom:0px}nav[role=doc-toc]{z-index:1020}#quarto-sidebar>*,nav[role=doc-toc]>*{transition:opacity .1s ease,border .1s ease}#quarto-sidebar.slow>*,nav[role=doc-toc].slow>*{transition:opacity .4s ease,border .4s ease}.quarto-color-scheme-toggle:not(.alternate).top-right .bi::before{background-image:url('data:image/svg+xml,')}.quarto-color-scheme-toggle.alternate.top-right .bi::before{background-image:url('data:image/svg+xml,')}#quarto-appendix.default{border-top:1px solid #dee2e6}#quarto-appendix.default{background-color:#fff;padding-top:1.5em;margin-top:2em;z-index:998}#quarto-appendix.default .quarto-appendix-heading{margin-top:0;line-height:1.4em;font-weight:600;opacity:.9;border-bottom:none;margin-bottom:0}#quarto-appendix.default .footnotes ol,#quarto-appendix.default .footnotes ol li>p:last-of-type,#quarto-appendix.default .quarto-appendix-contents>p:last-of-type{margin-bottom:0}#quarto-appendix.default .footnotes ol{margin-left:.5em}#quarto-appendix.default .quarto-appendix-secondary-label{margin-bottom:.4em}#quarto-appendix.default .quarto-appendix-bibtex{font-size:.7em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-bibtex code.sourceCode{white-space:pre-wrap}#quarto-appendix.default .quarto-appendix-citeas{font-size:.9em;padding:1em;border:solid 1px #dee2e6;margin-bottom:1em}#quarto-appendix.default .quarto-appendix-heading{font-size:1em !important}#quarto-appendix.default *[role=doc-endnotes]>ol,#quarto-appendix.default .quarto-appendix-contents>*:not(h2):not(.h2){font-size:.9em}#quarto-appendix.default section{padding-bottom:1.5em}#quarto-appendix.default section *[role=doc-endnotes],#quarto-appendix.default section>*:not(a){opacity:.9;word-wrap:break-word}.btn.btn-quarto,div.cell-output-display .btn-quarto{--bs-btn-color: #cacccd;--bs-btn-bg: #343a40;--bs-btn-border-color: #343a40;--bs-btn-hover-color: #cacccd;--bs-btn-hover-bg: 
#52585d;--bs-btn-hover-border-color: #484e53;--bs-btn-focus-shadow-rgb: 75, 80, 85;--bs-btn-active-color: #fff;--bs-btn-active-bg: #5d6166;--bs-btn-active-border-color: #484e53;--bs-btn-active-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);--bs-btn-disabled-color: #fff;--bs-btn-disabled-bg: #343a40;--bs-btn-disabled-border-color: #343a40}nav.quarto-secondary-nav.color-navbar{background-color:#5c2983;color:#e0d6e7}nav.quarto-secondary-nav.color-navbar h1,nav.quarto-secondary-nav.color-navbar .h1,nav.quarto-secondary-nav.color-navbar .quarto-btn-toggle{color:#e0d6e7}@media(max-width: 991.98px){body.nav-sidebar .quarto-title-banner{margin-bottom:0;padding-bottom:1em}body.nav-sidebar #title-block-header{margin-block-end:0}}p.subtitle{margin-top:.25em;margin-bottom:.5em}code a:any-link{color:inherit;text-decoration-color:#6c757d}/*! light */div.observablehq table thead tr th{background-color:var(--bs-body-bg)}input,button,select,optgroup,textarea{background-color:var(--bs-body-bg)}.code-annotated .code-copy-button{margin-right:1.25em;margin-top:0;padding-bottom:0;padding-top:3px}.code-annotation-gutter-bg{background-color:#fff}.code-annotation-gutter{background-color:#eff6ef}.code-annotation-gutter,.code-annotation-gutter-bg{height:100%;width:calc(20px + .5em);position:absolute;top:0;right:0}dl.code-annotation-container-grid dt{margin-right:1em;margin-top:.25rem}dl.code-annotation-container-grid dt{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;color:#4b545c;border:solid #4b545c 1px;border-radius:50%;height:22px;width:22px;line-height:22px;font-size:11px;text-align:center;vertical-align:middle;text-decoration:none}dl.code-annotation-container-grid dt[data-target-cell]{cursor:pointer}dl.code-annotation-container-grid dt[data-target-cell].code-annotation-active{color:#fff;border:solid #aaa 1px;background-color:#aaa}pre.code-annotation-code{padding-top:0;padding-bottom:0}pre.code-annotation-code 
code{z-index:3}#code-annotation-line-highlight-gutter{width:100%;border-top:solid rgba(170,170,170,.2666666667) 1px;border-bottom:solid rgba(170,170,170,.2666666667) 1px;z-index:2;background-color:rgba(170,170,170,.1333333333)}#code-annotation-line-highlight{margin-left:-4em;width:calc(100% + 4em);border-top:solid rgba(170,170,170,.2666666667) 1px;border-bottom:solid rgba(170,170,170,.2666666667) 1px;z-index:2;background-color:rgba(170,170,170,.1333333333)}code.sourceCode .code-annotation-anchor.code-annotation-active{background-color:var(--quarto-hl-normal-color, #aaaaaa);border:solid var(--quarto-hl-normal-color, #aaaaaa) 1px;color:#eff6ef;font-weight:bolder}code.sourceCode .code-annotation-anchor{font-family:SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",monospace;color:var(--quarto-hl-co-color);border:solid var(--quarto-hl-co-color) 1px;border-radius:50%;height:18px;width:18px;font-size:9px;margin-top:2px}code.sourceCode button.code-annotation-anchor{padding:2px;user-select:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;-o-user-select:none}code.sourceCode a.code-annotation-anchor{line-height:18px;text-align:center;vertical-align:middle;cursor:default;text-decoration:none}@media print{.page-columns .column-screen-inset{grid-column:page-start-inset/page-end-inset;z-index:998;opacity:.999}.page-columns .column-screen-inset table{background:#fff}.page-columns .column-screen-inset-left{grid-column:page-start-inset/body-content-end;z-index:998;opacity:.999}.page-columns .column-screen-inset-left table{background:#fff}.page-columns .column-screen-inset-right{grid-column:body-content-start/page-end-inset;z-index:998;opacity:.999}.page-columns .column-screen-inset-right table{background:#fff}.page-columns .column-screen{grid-column:page-start/page-end;z-index:998;opacity:.999}.page-columns .column-screen table{background:#fff}.page-columns 
.column-screen-left{grid-column:page-start/body-content-end;z-index:998;opacity:.999}.page-columns .column-screen-left table{background:#fff}.page-columns .column-screen-right{grid-column:body-content-start/page-end;z-index:998;opacity:.999}.page-columns .column-screen-right table{background:#fff}.page-columns .column-screen-inset-shaded{grid-column:page-start-inset/page-end-inset;padding:1em;background:#f8f9fa;z-index:998;opacity:.999;margin-bottom:1em}}.quarto-video{margin-bottom:1em}.table{border-top:1px solid #ebedee;border-bottom:1px solid #ebedee}.table>thead{border-top-width:0;border-bottom:1px solid #b2bac1}.table a{word-break:break-word}.table>:not(caption)>*>*{background-color:unset;color:unset}#quarto-document-content .crosstalk-input .checkbox input[type=checkbox],#quarto-document-content .crosstalk-input .checkbox-inline input[type=checkbox]{position:unset;margin-top:unset;margin-left:unset}#quarto-document-content .row{margin-left:unset;margin-right:unset}.quarto-xref{white-space:nowrap}a.external:after{content:"";background-image:url('data:image/svg+xml,');background-size:contain;background-repeat:no-repeat;background-position:center center;margin-left:.2em;padding-right:.75em}div.sourceCode code a.external:after{content:none}a.external:after:hover{cursor:pointer}.quarto-ext-icon{display:inline-block;font-size:.75em;padding-left:.3em}.code-with-filename .code-with-filename-file{margin-bottom:0;padding-bottom:2px;padding-top:2px;padding-left:.7em;border:var(--quarto-border-width) solid var(--quarto-border-color);border-radius:var(--quarto-border-radius);border-bottom:0;border-bottom-left-radius:0%;border-bottom-right-radius:0%}.code-with-filename div.sourceCode,.reveal .code-with-filename div.sourceCode{margin-top:0;border-top-left-radius:0%;border-top-right-radius:0%}.code-with-filename .code-with-filename-file pre{margin-bottom:0}.code-with-filename .code-with-filename-file{background-color:rgba(219,219,219,.8)}.quarto-dark .code-with-filename 
.code-with-filename-file{background-color:#555}.code-with-filename .code-with-filename-file strong{font-weight:400}.quarto-title-banner{margin-bottom:1em;color:#e0d6e7;background:#5c2983}.quarto-title-banner a{color:#e0d6e7}.quarto-title-banner h1,.quarto-title-banner .h1,.quarto-title-banner h2,.quarto-title-banner .h2{color:#e0d6e7}.quarto-title-banner .code-tools-button{color:#af96c1}.quarto-title-banner .code-tools-button:hover{color:#e0d6e7}.quarto-title-banner .code-tools-button>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .code-tools-button:hover>.bi::before{background-image:url('data:image/svg+xml,')}.quarto-title-banner .quarto-title .title{font-weight:600}.quarto-title-banner .quarto-categories{margin-top:.75em}@media(min-width: 992px){.quarto-title-banner{padding-top:2.5em;padding-bottom:2.5em}}@media(max-width: 991.98px){.quarto-title-banner{padding-top:1em;padding-bottom:1em}}@media(max-width: 767.98px){body.hypothesis-enabled #title-block-header>*{padding-right:20px}}main.quarto-banner-title-block>section:first-child>h2,main.quarto-banner-title-block>section:first-child>.h2,main.quarto-banner-title-block>section:first-child>h3,main.quarto-banner-title-block>section:first-child>.h3,main.quarto-banner-title-block>section:first-child>h4,main.quarto-banner-title-block>section:first-child>.h4{margin-top:0}.quarto-title .quarto-categories{display:flex;flex-wrap:wrap;row-gap:.5em;column-gap:.4em;padding-bottom:.5em;margin-top:.75em}.quarto-title .quarto-categories .quarto-category{padding:.25em .75em;font-size:.65em;text-transform:uppercase;border:solid 1px;border-radius:.25rem;opacity:.6}.quarto-title .quarto-categories .quarto-category a{color:inherit}.quarto-title-meta-container{display:grid;grid-template-columns:1fr auto}.quarto-title-meta-column-end{display:flex;flex-direction:column;padding-left:1em}.quarto-title-meta-column-end a .bi{margin-right:.3em}#title-block-header.quarto-title-block.default 
.quarto-title-meta{display:grid;grid-template-columns:minmax(max-content, 1fr) 1fr;grid-column-gap:1em}#title-block-header.quarto-title-block.default .quarto-title .title{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-author-orcid img{margin-top:-0.2em;height:.8em;width:.8em}#title-block-header.quarto-title-block.default .quarto-title-author-email{opacity:.7}#title-block-header.quarto-title-block.default .quarto-description p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .quarto-title-meta-contents p,#title-block-header.quarto-title-block.default .quarto-title-authors p,#title-block-header.quarto-title-block.default .quarto-title-affiliations p{margin-bottom:.1em}#title-block-header.quarto-title-block.default .quarto-title-meta-heading{text-transform:uppercase;margin-top:1em;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-contents{font-size:.9em}#title-block-header.quarto-title-block.default .quarto-title-meta-contents p.affiliation:last-of-type{margin-bottom:.1em}#title-block-header.quarto-title-block.default p.affiliation{margin-bottom:.1em}#title-block-header.quarto-title-block.default .keywords,#title-block-header.quarto-title-block.default .description,#title-block-header.quarto-title-block.default .abstract{margin-top:0}#title-block-header.quarto-title-block.default .keywords>p,#title-block-header.quarto-title-block.default .description>p,#title-block-header.quarto-title-block.default .abstract>p{font-size:.9em}#title-block-header.quarto-title-block.default .keywords>p:last-of-type,#title-block-header.quarto-title-block.default .description>p:last-of-type,#title-block-header.quarto-title-block.default .abstract>p:last-of-type{margin-bottom:0}#title-block-header.quarto-title-block.default .keywords .block-title,#title-block-header.quarto-title-block.default .description .block-title,#title-block-header.quarto-title-block.default .abstract 
.block-title{margin-top:1em;text-transform:uppercase;font-size:.8em;opacity:.8;font-weight:400}#title-block-header.quarto-title-block.default .quarto-title-meta-author{display:grid;grid-template-columns:minmax(max-content, 1fr) 1fr;grid-column-gap:1em}.quarto-title-tools-only{display:flex;justify-content:right}body{-webkit-font-smoothing:antialiased}.badge.bg-light{color:#343a40}.progress .progress-bar{font-size:8px;line-height:8px} diff --git a/site_libs/bootstrap/bootstrap.min.js b/site_libs/bootstrap/bootstrap.min.js new file mode 100644 index 0000000..e8f21f7 --- /dev/null +++ b/site_libs/bootstrap/bootstrap.min.js @@ -0,0 +1,7 @@ +/*! + * Bootstrap v5.3.1 (https://getbootstrap.com/) + * Copyright 2011-2023 The Bootstrap Authors (https://github.com/twbs/bootstrap/graphs/contributors) + * Licensed under MIT (https://github.com/twbs/bootstrap/blob/main/LICENSE) + */ +!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).bootstrap=e()}(this,(function(){"use strict";const t=new Map,e={set(e,i,n){t.has(e)||t.set(e,new Map);const s=t.get(e);s.has(i)||0===s.size?s.set(i,n):console.error(`Bootstrap doesn't allow more than one instance per element. 
Bound instance: ${Array.from(s.keys())[0]}.`)},get:(e,i)=>t.has(e)&&t.get(e).get(i)||null,remove(e,i){if(!t.has(e))return;const n=t.get(e);n.delete(i),0===n.size&&t.delete(e)}},i="transitionend",n=t=>(t&&window.CSS&&window.CSS.escape&&(t=t.replace(/#([^\s"#']+)/g,((t,e)=>`#${CSS.escape(e)}`))),t),s=t=>{t.dispatchEvent(new Event(i))},o=t=>!(!t||"object"!=typeof t)&&(void 0!==t.jquery&&(t=t[0]),void 0!==t.nodeType),r=t=>o(t)?t.jquery?t[0]:t:"string"==typeof t&&t.length>0?document.querySelector(n(t)):null,a=t=>{if(!o(t)||0===t.getClientRects().length)return!1;const e="visible"===getComputedStyle(t).getPropertyValue("visibility"),i=t.closest("details:not([open])");if(!i)return e;if(i!==t){const e=t.closest("summary");if(e&&e.parentNode!==i)return!1;if(null===e)return!1}return e},l=t=>!t||t.nodeType!==Node.ELEMENT_NODE||!!t.classList.contains("disabled")||(void 0!==t.disabled?t.disabled:t.hasAttribute("disabled")&&"false"!==t.getAttribute("disabled")),c=t=>{if(!document.documentElement.attachShadow)return null;if("function"==typeof t.getRootNode){const e=t.getRootNode();return e instanceof ShadowRoot?e:null}return t instanceof ShadowRoot?t:t.parentNode?c(t.parentNode):null},h=()=>{},d=t=>{t.offsetHeight},u=()=>window.jQuery&&!document.body.hasAttribute("data-bs-no-jquery")?window.jQuery:null,f=[],p=()=>"rtl"===document.documentElement.dir,m=t=>{var e;e=()=>{const e=u();if(e){const i=t.NAME,n=e.fn[i];e.fn[i]=t.jQueryInterface,e.fn[i].Constructor=t,e.fn[i].noConflict=()=>(e.fn[i]=n,t.jQueryInterface)}},"loading"===document.readyState?(f.length||document.addEventListener("DOMContentLoaded",(()=>{for(const t of f)t()})),f.push(e)):e()},g=(t,e=[],i=t)=>"function"==typeof t?t(...e):i,_=(t,e,n=!0)=>{if(!n)return void g(t);const o=(t=>{if(!t)return 0;let{transitionDuration:e,transitionDelay:i}=window.getComputedStyle(t);const n=Number.parseFloat(e),s=Number.parseFloat(i);return 
n||s?(e=e.split(",")[0],i=i.split(",")[0],1e3*(Number.parseFloat(e)+Number.parseFloat(i))):0})(e)+5;let r=!1;const a=({target:n})=>{n===e&&(r=!0,e.removeEventListener(i,a),g(t))};e.addEventListener(i,a),setTimeout((()=>{r||s(e)}),o)},b=(t,e,i,n)=>{const s=t.length;let o=t.indexOf(e);return-1===o?!i&&n?t[s-1]:t[0]:(o+=i?1:-1,n&&(o=(o+s)%s),t[Math.max(0,Math.min(o,s-1))])},v=/[^.]*(?=\..*)\.|.*/,y=/\..*/,w=/::\d+$/,A={};let E=1;const T={mouseenter:"mouseover",mouseleave:"mouseout"},C=new Set(["click","dblclick","mouseup","mousedown","contextmenu","mousewheel","DOMMouseScroll","mouseover","mouseout","mousemove","selectstart","selectend","keydown","keypress","keyup","orientationchange","touchstart","touchmove","touchend","touchcancel","pointerdown","pointermove","pointerup","pointerleave","pointercancel","gesturestart","gesturechange","gestureend","focus","blur","change","reset","select","submit","focusin","focusout","load","unload","beforeunload","resize","move","DOMContentLoaded","readystatechange","error","abort","scroll"]);function O(t,e){return e&&`${e}::${E++}`||t.uidEvent||E++}function x(t){const e=O(t);return t.uidEvent=e,A[e]=A[e]||{},A[e]}function k(t,e,i=null){return Object.values(t).find((t=>t.callable===e&&t.delegationSelector===i))}function L(t,e,i){const n="string"==typeof e,s=n?i:e||i;let o=I(t);return C.has(o)||(o=t),[n,s,o]}function S(t,e,i,n,s){if("string"!=typeof e||!t)return;let[o,r,a]=L(e,i,n);if(e in T){const t=t=>function(e){if(!e.relatedTarget||e.relatedTarget!==e.delegateTarget&&!e.delegateTarget.contains(e.relatedTarget))return t.call(this,e)};r=t(r)}const l=x(t),c=l[a]||(l[a]={}),h=k(c,r,o?i:null);if(h)return void(h.oneOff=h.oneOff&&s);const d=O(r,e.replace(v,"")),u=o?function(t,e,i){return function n(s){const o=t.querySelectorAll(e);for(let{target:r}=s;r&&r!==this;r=r.parentNode)for(const a of o)if(a===r)return P(s,{delegateTarget:r}),n.oneOff&&N.off(t,s.type,e,i),i.apply(r,[s])}}(t,i,r):function(t,e){return function i(n){return 
P(n,{delegateTarget:t}),i.oneOff&&N.off(t,n.type,e),e.apply(t,[n])}}(t,r);u.delegationSelector=o?i:null,u.callable=r,u.oneOff=s,u.uidEvent=d,c[d]=u,t.addEventListener(a,u,o)}function D(t,e,i,n,s){const o=k(e[i],n,s);o&&(t.removeEventListener(i,o,Boolean(s)),delete e[i][o.uidEvent])}function $(t,e,i,n){const s=e[i]||{};for(const[o,r]of Object.entries(s))o.includes(n)&&D(t,e,i,r.callable,r.delegationSelector)}function I(t){return t=t.replace(y,""),T[t]||t}const N={on(t,e,i,n){S(t,e,i,n,!1)},one(t,e,i,n){S(t,e,i,n,!0)},off(t,e,i,n){if("string"!=typeof e||!t)return;const[s,o,r]=L(e,i,n),a=r!==e,l=x(t),c=l[r]||{},h=e.startsWith(".");if(void 0===o){if(h)for(const i of Object.keys(l))$(t,l,i,e.slice(1));for(const[i,n]of Object.entries(c)){const s=i.replace(w,"");a&&!e.includes(s)||D(t,l,r,n.callable,n.delegationSelector)}}else{if(!Object.keys(c).length)return;D(t,l,r,o,s?i:null)}},trigger(t,e,i){if("string"!=typeof e||!t)return null;const n=u();let s=null,o=!0,r=!0,a=!1;e!==I(e)&&n&&(s=n.Event(e,i),n(t).trigger(s),o=!s.isPropagationStopped(),r=!s.isImmediatePropagationStopped(),a=s.isDefaultPrevented());const l=P(new Event(e,{bubbles:o,cancelable:!0}),i);return a&&l.preventDefault(),r&&t.dispatchEvent(l),l.defaultPrevented&&s&&s.preventDefault(),l}};function P(t,e={}){for(const[i,n]of Object.entries(e))try{t[i]=n}catch(e){Object.defineProperty(t,i,{configurable:!0,get:()=>n})}return t}function M(t){if("true"===t)return!0;if("false"===t)return!1;if(t===Number(t).toString())return Number(t);if(""===t||"null"===t)return null;if("string"!=typeof t)return t;try{return JSON.parse(decodeURIComponent(t))}catch(e){return t}}function j(t){return t.replace(/[A-Z]/g,(t=>`-${t.toLowerCase()}`))}const F={setDataAttribute(t,e,i){t.setAttribute(`data-bs-${j(e)}`,i)},removeDataAttribute(t,e){t.removeAttribute(`data-bs-${j(e)}`)},getDataAttributes(t){if(!t)return{};const e={},i=Object.keys(t.dataset).filter((t=>t.startsWith("bs")&&!t.startsWith("bsConfig")));for(const n of i){let 
i=n.replace(/^bs/,"");i=i.charAt(0).toLowerCase()+i.slice(1,i.length),e[i]=M(t.dataset[n])}return e},getDataAttribute:(t,e)=>M(t.getAttribute(`data-bs-${j(e)}`))};class H{static get Default(){return{}}static get DefaultType(){return{}}static get NAME(){throw new Error('You have to implement the static method "NAME", for each component!')}_getConfig(t){return t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t}_mergeConfigObj(t,e){const i=o(e)?F.getDataAttribute(e,"config"):{};return{...this.constructor.Default,..."object"==typeof i?i:{},...o(e)?F.getDataAttributes(e):{},..."object"==typeof t?t:{}}}_typeCheckConfig(t,e=this.constructor.DefaultType){for(const[n,s]of Object.entries(e)){const e=t[n],r=o(e)?"element":null==(i=e)?`${i}`:Object.prototype.toString.call(i).match(/\s([a-z]+)/i)[1].toLowerCase();if(!new RegExp(s).test(r))throw new TypeError(`${this.constructor.NAME.toUpperCase()}: Option "${n}" provided type "${r}" but expected type "${s}".`)}var i}}class W extends H{constructor(t,i){super(),(t=r(t))&&(this._element=t,this._config=this._getConfig(i),e.set(this._element,this.constructor.DATA_KEY,this))}dispose(){e.remove(this._element,this.constructor.DATA_KEY),N.off(this._element,this.constructor.EVENT_KEY);for(const t of Object.getOwnPropertyNames(this))this[t]=null}_queueCallback(t,e,i=!0){_(t,e,i)}_getConfig(t){return t=this._mergeConfigObj(t,this._element),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}static getInstance(t){return e.get(r(t),this.DATA_KEY)}static getOrCreateInstance(t,e={}){return this.getInstance(t)||new this(t,"object"==typeof e?e:null)}static get VERSION(){return"5.3.1"}static get DATA_KEY(){return`bs.${this.NAME}`}static get EVENT_KEY(){return`.${this.DATA_KEY}`}static eventName(t){return`${t}${this.EVENT_KEY}`}}const B=t=>{let e=t.getAttribute("data-bs-target");if(!e||"#"===e){let i=t.getAttribute("href");if(!i||!i.includes("#")&&!i.startsWith("."))return 
null;i.includes("#")&&!i.startsWith("#")&&(i=`#${i.split("#")[1]}`),e=i&&"#"!==i?i.trim():null}return n(e)},z={find:(t,e=document.documentElement)=>[].concat(...Element.prototype.querySelectorAll.call(e,t)),findOne:(t,e=document.documentElement)=>Element.prototype.querySelector.call(e,t),children:(t,e)=>[].concat(...t.children).filter((t=>t.matches(e))),parents(t,e){const i=[];let n=t.parentNode.closest(e);for(;n;)i.push(n),n=n.parentNode.closest(e);return i},prev(t,e){let i=t.previousElementSibling;for(;i;){if(i.matches(e))return[i];i=i.previousElementSibling}return[]},next(t,e){let i=t.nextElementSibling;for(;i;){if(i.matches(e))return[i];i=i.nextElementSibling}return[]},focusableChildren(t){const e=["a","button","input","textarea","select","details","[tabindex]",'[contenteditable="true"]'].map((t=>`${t}:not([tabindex^="-"])`)).join(",");return this.find(e,t).filter((t=>!l(t)&&a(t)))},getSelectorFromElement(t){const e=B(t);return e&&z.findOne(e)?e:null},getElementFromSelector(t){const e=B(t);return e?z.findOne(e):null},getMultipleElementsFromSelector(t){const e=B(t);return e?z.find(e):[]}},R=(t,e="hide")=>{const i=`click.dismiss${t.EVENT_KEY}`,n=t.NAME;N.on(document,i,`[data-bs-dismiss="${n}"]`,(function(i){if(["A","AREA"].includes(this.tagName)&&i.preventDefault(),l(this))return;const s=z.getElementFromSelector(this)||this.closest(`.${n}`);t.getOrCreateInstance(s)[e]()}))},q=".bs.alert",V=`close${q}`,K=`closed${q}`;class Q extends W{static get NAME(){return"alert"}close(){if(N.trigger(this._element,V).defaultPrevented)return;this._element.classList.remove("show");const t=this._element.classList.contains("fade");this._queueCallback((()=>this._destroyElement()),this._element,t)}_destroyElement(){this._element.remove(),N.trigger(this._element,K),this.dispose()}static jQueryInterface(t){return this.each((function(){const e=Q.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named 
"${t}"`);e[t](this)}}))}}R(Q,"close"),m(Q);const X='[data-bs-toggle="button"]';class Y extends W{static get NAME(){return"button"}toggle(){this._element.setAttribute("aria-pressed",this._element.classList.toggle("active"))}static jQueryInterface(t){return this.each((function(){const e=Y.getOrCreateInstance(this);"toggle"===t&&e[t]()}))}}N.on(document,"click.bs.button.data-api",X,(t=>{t.preventDefault();const e=t.target.closest(X);Y.getOrCreateInstance(e).toggle()})),m(Y);const U=".bs.swipe",G=`touchstart${U}`,J=`touchmove${U}`,Z=`touchend${U}`,tt=`pointerdown${U}`,et=`pointerup${U}`,it={endCallback:null,leftCallback:null,rightCallback:null},nt={endCallback:"(function|null)",leftCallback:"(function|null)",rightCallback:"(function|null)"};class st extends H{constructor(t,e){super(),this._element=t,t&&st.isSupported()&&(this._config=this._getConfig(e),this._deltaX=0,this._supportPointerEvents=Boolean(window.PointerEvent),this._initEvents())}static get Default(){return it}static get DefaultType(){return nt}static get NAME(){return"swipe"}dispose(){N.off(this._element,U)}_start(t){this._supportPointerEvents?this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX):this._deltaX=t.touches[0].clientX}_end(t){this._eventIsPointerPenTouch(t)&&(this._deltaX=t.clientX-this._deltaX),this._handleSwipe(),g(this._config.endCallback)}_move(t){this._deltaX=t.touches&&t.touches.length>1?0:t.touches[0].clientX-this._deltaX}_handleSwipe(){const t=Math.abs(this._deltaX);if(t<=40)return;const e=t/this._deltaX;this._deltaX=0,e&&g(e>0?this._config.rightCallback:this._config.leftCallback)}_initEvents(){this._supportPointerEvents?(N.on(this._element,tt,(t=>this._start(t))),N.on(this._element,et,(t=>this._end(t))),this._element.classList.add("pointer-event")):(N.on(this._element,G,(t=>this._start(t))),N.on(this._element,J,(t=>this._move(t))),N.on(this._element,Z,(t=>this._end(t))))}_eventIsPointerPenTouch(t){return 
this._supportPointerEvents&&("pen"===t.pointerType||"touch"===t.pointerType)}static isSupported(){return"ontouchstart"in document.documentElement||navigator.maxTouchPoints>0}}const ot=".bs.carousel",rt=".data-api",at="next",lt="prev",ct="left",ht="right",dt=`slide${ot}`,ut=`slid${ot}`,ft=`keydown${ot}`,pt=`mouseenter${ot}`,mt=`mouseleave${ot}`,gt=`dragstart${ot}`,_t=`load${ot}${rt}`,bt=`click${ot}${rt}`,vt="carousel",yt="active",wt=".active",At=".carousel-item",Et=wt+At,Tt={ArrowLeft:ht,ArrowRight:ct},Ct={interval:5e3,keyboard:!0,pause:"hover",ride:!1,touch:!0,wrap:!0},Ot={interval:"(number|boolean)",keyboard:"boolean",pause:"(string|boolean)",ride:"(boolean|string)",touch:"boolean",wrap:"boolean"};class xt extends W{constructor(t,e){super(t,e),this._interval=null,this._activeElement=null,this._isSliding=!1,this.touchTimeout=null,this._swipeHelper=null,this._indicatorsElement=z.findOne(".carousel-indicators",this._element),this._addEventListeners(),this._config.ride===vt&&this.cycle()}static get Default(){return Ct}static get DefaultType(){return Ot}static get NAME(){return"carousel"}next(){this._slide(at)}nextWhenVisible(){!document.hidden&&a(this._element)&&this.next()}prev(){this._slide(lt)}pause(){this._isSliding&&s(this._element),this._clearInterval()}cycle(){this._clearInterval(),this._updateInterval(),this._interval=setInterval((()=>this.nextWhenVisible()),this._config.interval)}_maybeEnableCycle(){this._config.ride&&(this._isSliding?N.one(this._element,ut,(()=>this.cycle())):this.cycle())}to(t){const e=this._getItems();if(t>e.length-1||t<0)return;if(this._isSliding)return void N.one(this._element,ut,(()=>this.to(t)));const i=this._getItemIndex(this._getActive());if(i===t)return;const n=t>i?at:lt;this._slide(n,e[t])}dispose(){this._swipeHelper&&this._swipeHelper.dispose(),super.dispose()}_configAfterMerge(t){return 
t.defaultInterval=t.interval,t}_addEventListeners(){this._config.keyboard&&N.on(this._element,ft,(t=>this._keydown(t))),"hover"===this._config.pause&&(N.on(this._element,pt,(()=>this.pause())),N.on(this._element,mt,(()=>this._maybeEnableCycle()))),this._config.touch&&st.isSupported()&&this._addTouchEventListeners()}_addTouchEventListeners(){for(const t of z.find(".carousel-item img",this._element))N.on(t,gt,(t=>t.preventDefault()));const t={leftCallback:()=>this._slide(this._directionToOrder(ct)),rightCallback:()=>this._slide(this._directionToOrder(ht)),endCallback:()=>{"hover"===this._config.pause&&(this.pause(),this.touchTimeout&&clearTimeout(this.touchTimeout),this.touchTimeout=setTimeout((()=>this._maybeEnableCycle()),500+this._config.interval))}};this._swipeHelper=new st(this._element,t)}_keydown(t){if(/input|textarea/i.test(t.target.tagName))return;const e=Tt[t.key];e&&(t.preventDefault(),this._slide(this._directionToOrder(e)))}_getItemIndex(t){return this._getItems().indexOf(t)}_setActiveIndicatorElement(t){if(!this._indicatorsElement)return;const e=z.findOne(wt,this._indicatorsElement);e.classList.remove(yt),e.removeAttribute("aria-current");const i=z.findOne(`[data-bs-slide-to="${t}"]`,this._indicatorsElement);i&&(i.classList.add(yt),i.setAttribute("aria-current","true"))}_updateInterval(){const t=this._activeElement||this._getActive();if(!t)return;const e=Number.parseInt(t.getAttribute("data-bs-interval"),10);this._config.interval=e||this._config.defaultInterval}_slide(t,e=null){if(this._isSliding)return;const i=this._getActive(),n=t===at,s=e||b(this._getItems(),i,n,this._config.wrap);if(s===i)return;const o=this._getItemIndex(s),r=e=>N.trigger(this._element,e,{relatedTarget:s,direction:this._orderToDirection(t),from:this._getItemIndex(i),to:o});if(r(dt).defaultPrevented)return;if(!i||!s)return;const a=Boolean(this._interval);this.pause(),this._isSliding=!0,this._setActiveIndicatorElement(o),this._activeElement=s;const 
l=n?"carousel-item-start":"carousel-item-end",c=n?"carousel-item-next":"carousel-item-prev";s.classList.add(c),d(s),i.classList.add(l),s.classList.add(l),this._queueCallback((()=>{s.classList.remove(l,c),s.classList.add(yt),i.classList.remove(yt,c,l),this._isSliding=!1,r(ut)}),i,this._isAnimated()),a&&this.cycle()}_isAnimated(){return this._element.classList.contains("slide")}_getActive(){return z.findOne(Et,this._element)}_getItems(){return z.find(At,this._element)}_clearInterval(){this._interval&&(clearInterval(this._interval),this._interval=null)}_directionToOrder(t){return p()?t===ct?lt:at:t===ct?at:lt}_orderToDirection(t){return p()?t===lt?ct:ht:t===lt?ht:ct}static jQueryInterface(t){return this.each((function(){const e=xt.getOrCreateInstance(this,t);if("number"!=typeof t){if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}else e.to(t)}))}}N.on(document,bt,"[data-bs-slide], [data-bs-slide-to]",(function(t){const e=z.getElementFromSelector(this);if(!e||!e.classList.contains(vt))return;t.preventDefault();const i=xt.getOrCreateInstance(e),n=this.getAttribute("data-bs-slide-to");return n?(i.to(n),void i._maybeEnableCycle()):"next"===F.getDataAttribute(this,"slide")?(i.next(),void i._maybeEnableCycle()):(i.prev(),void i._maybeEnableCycle())})),N.on(window,_t,(()=>{const t=z.find('[data-bs-ride="carousel"]');for(const e of t)xt.getOrCreateInstance(e)})),m(xt);const kt=".bs.collapse",Lt=`show${kt}`,St=`shown${kt}`,Dt=`hide${kt}`,$t=`hidden${kt}`,It=`click${kt}.data-api`,Nt="show",Pt="collapse",Mt="collapsing",jt=`:scope .${Pt} .${Pt}`,Ft='[data-bs-toggle="collapse"]',Ht={parent:null,toggle:!0},Wt={parent:"(null|element)",toggle:"boolean"};class Bt extends W{constructor(t,e){super(t,e),this._isTransitioning=!1,this._triggerArray=[];const i=z.find(Ft);for(const t of i){const 
e=z.getSelectorFromElement(t),i=z.find(e).filter((t=>t===this._element));null!==e&&i.length&&this._triggerArray.push(t)}this._initializeChildren(),this._config.parent||this._addAriaAndCollapsedClass(this._triggerArray,this._isShown()),this._config.toggle&&this.toggle()}static get Default(){return Ht}static get DefaultType(){return Wt}static get NAME(){return"collapse"}toggle(){this._isShown()?this.hide():this.show()}show(){if(this._isTransitioning||this._isShown())return;let t=[];if(this._config.parent&&(t=this._getFirstLevelChildren(".collapse.show, .collapse.collapsing").filter((t=>t!==this._element)).map((t=>Bt.getOrCreateInstance(t,{toggle:!1})))),t.length&&t[0]._isTransitioning)return;if(N.trigger(this._element,Lt).defaultPrevented)return;for(const e of t)e.hide();const e=this._getDimension();this._element.classList.remove(Pt),this._element.classList.add(Mt),this._element.style[e]=0,this._addAriaAndCollapsedClass(this._triggerArray,!0),this._isTransitioning=!0;const i=`scroll${e[0].toUpperCase()+e.slice(1)}`;this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(Mt),this._element.classList.add(Pt,Nt),this._element.style[e]="",N.trigger(this._element,St)}),this._element,!0),this._element.style[e]=`${this._element[i]}px`}hide(){if(this._isTransitioning||!this._isShown())return;if(N.trigger(this._element,Dt).defaultPrevented)return;const t=this._getDimension();this._element.style[t]=`${this._element.getBoundingClientRect()[t]}px`,d(this._element),this._element.classList.add(Mt),this._element.classList.remove(Pt,Nt);for(const t of this._triggerArray){const e=z.getElementFromSelector(t);e&&!this._isShown(e)&&this._addAriaAndCollapsedClass([t],!1)}this._isTransitioning=!0,this._element.style[t]="",this._queueCallback((()=>{this._isTransitioning=!1,this._element.classList.remove(Mt),this._element.classList.add(Pt),N.trigger(this._element,$t)}),this._element,!0)}_isShown(t=this._element){return 
t.classList.contains(Nt)}_configAfterMerge(t){return t.toggle=Boolean(t.toggle),t.parent=r(t.parent),t}_getDimension(){return this._element.classList.contains("collapse-horizontal")?"width":"height"}_initializeChildren(){if(!this._config.parent)return;const t=this._getFirstLevelChildren(Ft);for(const e of t){const t=z.getElementFromSelector(e);t&&this._addAriaAndCollapsedClass([e],this._isShown(t))}}_getFirstLevelChildren(t){const e=z.find(jt,this._config.parent);return z.find(t,this._config.parent).filter((t=>!e.includes(t)))}_addAriaAndCollapsedClass(t,e){if(t.length)for(const i of t)i.classList.toggle("collapsed",!e),i.setAttribute("aria-expanded",e)}static jQueryInterface(t){const e={};return"string"==typeof t&&/show|hide/.test(t)&&(e.toggle=!1),this.each((function(){const i=Bt.getOrCreateInstance(this,e);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t]()}}))}}N.on(document,It,Ft,(function(t){("A"===t.target.tagName||t.delegateTarget&&"A"===t.delegateTarget.tagName)&&t.preventDefault();for(const t of z.getMultipleElementsFromSelector(this))Bt.getOrCreateInstance(t,{toggle:!1}).toggle()})),m(Bt);var zt="top",Rt="bottom",qt="right",Vt="left",Kt="auto",Qt=[zt,Rt,qt,Vt],Xt="start",Yt="end",Ut="clippingParents",Gt="viewport",Jt="popper",Zt="reference",te=Qt.reduce((function(t,e){return t.concat([e+"-"+Xt,e+"-"+Yt])}),[]),ee=[].concat(Qt,[Kt]).reduce((function(t,e){return t.concat([e,e+"-"+Xt,e+"-"+Yt])}),[]),ie="beforeRead",ne="read",se="afterRead",oe="beforeMain",re="main",ae="afterMain",le="beforeWrite",ce="write",he="afterWrite",de=[ie,ne,se,oe,re,ae,le,ce,he];function ue(t){return t?(t.nodeName||"").toLowerCase():null}function fe(t){if(null==t)return window;if("[object Window]"!==t.toString()){var e=t.ownerDocument;return e&&e.defaultView||window}return t}function pe(t){return t instanceof fe(t).Element||t instanceof Element}function me(t){return t instanceof fe(t).HTMLElement||t instanceof HTMLElement}function 
ge(t){return"undefined"!=typeof ShadowRoot&&(t instanceof fe(t).ShadowRoot||t instanceof ShadowRoot)}const _e={name:"applyStyles",enabled:!0,phase:"write",fn:function(t){var e=t.state;Object.keys(e.elements).forEach((function(t){var i=e.styles[t]||{},n=e.attributes[t]||{},s=e.elements[t];me(s)&&ue(s)&&(Object.assign(s.style,i),Object.keys(n).forEach((function(t){var e=n[t];!1===e?s.removeAttribute(t):s.setAttribute(t,!0===e?"":e)})))}))},effect:function(t){var e=t.state,i={popper:{position:e.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};return Object.assign(e.elements.popper.style,i.popper),e.styles=i,e.elements.arrow&&Object.assign(e.elements.arrow.style,i.arrow),function(){Object.keys(e.elements).forEach((function(t){var n=e.elements[t],s=e.attributes[t]||{},o=Object.keys(e.styles.hasOwnProperty(t)?e.styles[t]:i[t]).reduce((function(t,e){return t[e]="",t}),{});me(n)&&ue(n)&&(Object.assign(n.style,o),Object.keys(s).forEach((function(t){n.removeAttribute(t)})))}))}},requires:["computeStyles"]};function be(t){return t.split("-")[0]}var ve=Math.max,ye=Math.min,we=Math.round;function Ae(){var t=navigator.userAgentData;return null!=t&&t.brands&&Array.isArray(t.brands)?t.brands.map((function(t){return t.brand+"/"+t.version})).join(" "):navigator.userAgent}function Ee(){return!/^((?!chrome|android).)*safari/i.test(Ae())}function Te(t,e,i){void 0===e&&(e=!1),void 0===i&&(i=!1);var n=t.getBoundingClientRect(),s=1,o=1;e&&me(t)&&(s=t.offsetWidth>0&&we(n.width)/t.offsetWidth||1,o=t.offsetHeight>0&&we(n.height)/t.offsetHeight||1);var r=(pe(t)?fe(t):window).visualViewport,a=!Ee()&&i,l=(n.left+(a&&r?r.offsetLeft:0))/s,c=(n.top+(a&&r?r.offsetTop:0))/o,h=n.width/s,d=n.height/o;return{width:h,height:d,top:c,right:l+h,bottom:c+d,left:l,x:l,y:c}}function Ce(t){var e=Te(t),i=t.offsetWidth,n=t.offsetHeight;return Math.abs(e.width-i)<=1&&(i=e.width),Math.abs(e.height-n)<=1&&(n=e.height),{x:t.offsetLeft,y:t.offsetTop,width:i,height:n}}function 
Oe(t,e){var i=e.getRootNode&&e.getRootNode();if(t.contains(e))return!0;if(i&&ge(i)){var n=e;do{if(n&&t.isSameNode(n))return!0;n=n.parentNode||n.host}while(n)}return!1}function xe(t){return fe(t).getComputedStyle(t)}function ke(t){return["table","td","th"].indexOf(ue(t))>=0}function Le(t){return((pe(t)?t.ownerDocument:t.document)||window.document).documentElement}function Se(t){return"html"===ue(t)?t:t.assignedSlot||t.parentNode||(ge(t)?t.host:null)||Le(t)}function De(t){return me(t)&&"fixed"!==xe(t).position?t.offsetParent:null}function $e(t){for(var e=fe(t),i=De(t);i&&ke(i)&&"static"===xe(i).position;)i=De(i);return i&&("html"===ue(i)||"body"===ue(i)&&"static"===xe(i).position)?e:i||function(t){var e=/firefox/i.test(Ae());if(/Trident/i.test(Ae())&&me(t)&&"fixed"===xe(t).position)return null;var i=Se(t);for(ge(i)&&(i=i.host);me(i)&&["html","body"].indexOf(ue(i))<0;){var n=xe(i);if("none"!==n.transform||"none"!==n.perspective||"paint"===n.contain||-1!==["transform","perspective"].indexOf(n.willChange)||e&&"filter"===n.willChange||e&&n.filter&&"none"!==n.filter)return i;i=i.parentNode}return null}(t)||e}function Ie(t){return["top","bottom"].indexOf(t)>=0?"x":"y"}function Ne(t,e,i){return ve(t,ye(e,i))}function Pe(t){return Object.assign({},{top:0,right:0,bottom:0,left:0},t)}function Me(t,e){return e.reduce((function(e,i){return e[i]=t,e}),{})}const je={name:"arrow",enabled:!0,phase:"main",fn:function(t){var e,i=t.state,n=t.name,s=t.options,o=i.elements.arrow,r=i.modifiersData.popperOffsets,a=be(i.placement),l=Ie(a),c=[Vt,qt].indexOf(a)>=0?"height":"width";if(o&&r){var h=function(t,e){return Pe("number"!=typeof(t="function"==typeof 
t?t(Object.assign({},e.rects,{placement:e.placement})):t)?t:Me(t,Qt))}(s.padding,i),d=Ce(o),u="y"===l?zt:Vt,f="y"===l?Rt:qt,p=i.rects.reference[c]+i.rects.reference[l]-r[l]-i.rects.popper[c],m=r[l]-i.rects.reference[l],g=$e(o),_=g?"y"===l?g.clientHeight||0:g.clientWidth||0:0,b=p/2-m/2,v=h[u],y=_-d[c]-h[f],w=_/2-d[c]/2+b,A=Ne(v,w,y),E=l;i.modifiersData[n]=((e={})[E]=A,e.centerOffset=A-w,e)}},effect:function(t){var e=t.state,i=t.options.element,n=void 0===i?"[data-popper-arrow]":i;null!=n&&("string"!=typeof n||(n=e.elements.popper.querySelector(n)))&&Oe(e.elements.popper,n)&&(e.elements.arrow=n)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function Fe(t){return t.split("-")[1]}var He={top:"auto",right:"auto",bottom:"auto",left:"auto"};function We(t){var e,i=t.popper,n=t.popperRect,s=t.placement,o=t.variation,r=t.offsets,a=t.position,l=t.gpuAcceleration,c=t.adaptive,h=t.roundOffsets,d=t.isFixed,u=r.x,f=void 0===u?0:u,p=r.y,m=void 0===p?0:p,g="function"==typeof h?h({x:f,y:m}):{x:f,y:m};f=g.x,m=g.y;var _=r.hasOwnProperty("x"),b=r.hasOwnProperty("y"),v=Vt,y=zt,w=window;if(c){var A=$e(i),E="clientHeight",T="clientWidth";A===fe(i)&&"static"!==xe(A=Le(i)).position&&"absolute"===a&&(E="scrollHeight",T="scrollWidth"),(s===zt||(s===Vt||s===qt)&&o===Yt)&&(y=Rt,m-=(d&&A===w&&w.visualViewport?w.visualViewport.height:A[E])-n.height,m*=l?1:-1),s!==Vt&&(s!==zt&&s!==Rt||o!==Yt)||(v=qt,f-=(d&&A===w&&w.visualViewport?w.visualViewport.width:A[T])-n.width,f*=l?1:-1)}var C,O=Object.assign({position:a},c&&He),x=!0===h?function(t,e){var i=t.x,n=t.y,s=e.devicePixelRatio||1;return{x:we(i*s)/s||0,y:we(n*s)/s||0}}({x:f,y:m},fe(i)):{x:f,y:m};return f=x.x,m=x.y,l?Object.assign({},O,((C={})[y]=b?"0":"",C[v]=_?"0":"",C.transform=(w.devicePixelRatio||1)<=1?"translate("+f+"px, "+m+"px)":"translate3d("+f+"px, "+m+"px, 0)",C)):Object.assign({},O,((e={})[y]=b?m+"px":"",e[v]=_?f+"px":"",e.transform="",e))}const 
Be={name:"computeStyles",enabled:!0,phase:"beforeWrite",fn:function(t){var e=t.state,i=t.options,n=i.gpuAcceleration,s=void 0===n||n,o=i.adaptive,r=void 0===o||o,a=i.roundOffsets,l=void 0===a||a,c={placement:be(e.placement),variation:Fe(e.placement),popper:e.elements.popper,popperRect:e.rects.popper,gpuAcceleration:s,isFixed:"fixed"===e.options.strategy};null!=e.modifiersData.popperOffsets&&(e.styles.popper=Object.assign({},e.styles.popper,We(Object.assign({},c,{offsets:e.modifiersData.popperOffsets,position:e.options.strategy,adaptive:r,roundOffsets:l})))),null!=e.modifiersData.arrow&&(e.styles.arrow=Object.assign({},e.styles.arrow,We(Object.assign({},c,{offsets:e.modifiersData.arrow,position:"absolute",adaptive:!1,roundOffsets:l})))),e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-placement":e.placement})},data:{}};var ze={passive:!0};const Re={name:"eventListeners",enabled:!0,phase:"write",fn:function(){},effect:function(t){var e=t.state,i=t.instance,n=t.options,s=n.scroll,o=void 0===s||s,r=n.resize,a=void 0===r||r,l=fe(e.elements.popper),c=[].concat(e.scrollParents.reference,e.scrollParents.popper);return o&&c.forEach((function(t){t.addEventListener("scroll",i.update,ze)})),a&&l.addEventListener("resize",i.update,ze),function(){o&&c.forEach((function(t){t.removeEventListener("scroll",i.update,ze)})),a&&l.removeEventListener("resize",i.update,ze)}},data:{}};var qe={left:"right",right:"left",bottom:"top",top:"bottom"};function Ve(t){return t.replace(/left|right|bottom|top/g,(function(t){return qe[t]}))}var Ke={start:"end",end:"start"};function Qe(t){return t.replace(/start|end/g,(function(t){return Ke[t]}))}function Xe(t){var e=fe(t);return{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function Ye(t){return Te(Le(t)).left+Xe(t).scrollLeft}function Ue(t){var e=xe(t),i=e.overflow,n=e.overflowX,s=e.overflowY;return/auto|scroll|overlay|hidden/.test(i+s+n)}function 
Ge(t){return["html","body","#document"].indexOf(ue(t))>=0?t.ownerDocument.body:me(t)&&Ue(t)?t:Ge(Se(t))}function Je(t,e){var i;void 0===e&&(e=[]);var n=Ge(t),s=n===(null==(i=t.ownerDocument)?void 0:i.body),o=fe(n),r=s?[o].concat(o.visualViewport||[],Ue(n)?n:[]):n,a=e.concat(r);return s?a:a.concat(Je(Se(r)))}function Ze(t){return Object.assign({},t,{left:t.x,top:t.y,right:t.x+t.width,bottom:t.y+t.height})}function ti(t,e,i){return e===Gt?Ze(function(t,e){var i=fe(t),n=Le(t),s=i.visualViewport,o=n.clientWidth,r=n.clientHeight,a=0,l=0;if(s){o=s.width,r=s.height;var c=Ee();(c||!c&&"fixed"===e)&&(a=s.offsetLeft,l=s.offsetTop)}return{width:o,height:r,x:a+Ye(t),y:l}}(t,i)):pe(e)?function(t,e){var i=Te(t,!1,"fixed"===e);return i.top=i.top+t.clientTop,i.left=i.left+t.clientLeft,i.bottom=i.top+t.clientHeight,i.right=i.left+t.clientWidth,i.width=t.clientWidth,i.height=t.clientHeight,i.x=i.left,i.y=i.top,i}(e,i):Ze(function(t){var e,i=Le(t),n=Xe(t),s=null==(e=t.ownerDocument)?void 0:e.body,o=ve(i.scrollWidth,i.clientWidth,s?s.scrollWidth:0,s?s.clientWidth:0),r=ve(i.scrollHeight,i.clientHeight,s?s.scrollHeight:0,s?s.clientHeight:0),a=-n.scrollLeft+Ye(t),l=-n.scrollTop;return"rtl"===xe(s||i).direction&&(a+=ve(i.clientWidth,s?s.clientWidth:0)-o),{width:o,height:r,x:a,y:l}}(Le(t)))}function ei(t){var e,i=t.reference,n=t.element,s=t.placement,o=s?be(s):null,r=s?Fe(s):null,a=i.x+i.width/2-n.width/2,l=i.y+i.height/2-n.height/2;switch(o){case zt:e={x:a,y:i.y-n.height};break;case Rt:e={x:a,y:i.y+i.height};break;case qt:e={x:i.x+i.width,y:l};break;case Vt:e={x:i.x-n.width,y:l};break;default:e={x:i.x,y:i.y}}var c=o?Ie(o):null;if(null!=c){var h="y"===c?"height":"width";switch(r){case Xt:e[c]=e[c]-(i[h]/2-n[h]/2);break;case Yt:e[c]=e[c]+(i[h]/2-n[h]/2)}}return e}function ii(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=void 0===n?t.placement:n,o=i.strategy,r=void 0===o?t.strategy:o,a=i.boundary,l=void 0===a?Ut:a,c=i.rootBoundary,h=void 0===c?Gt:c,d=i.elementContext,u=void 
0===d?Jt:d,f=i.altBoundary,p=void 0!==f&&f,m=i.padding,g=void 0===m?0:m,_=Pe("number"!=typeof g?g:Me(g,Qt)),b=u===Jt?Zt:Jt,v=t.rects.popper,y=t.elements[p?b:u],w=function(t,e,i,n){var s="clippingParents"===e?function(t){var e=Je(Se(t)),i=["absolute","fixed"].indexOf(xe(t).position)>=0&&me(t)?$e(t):t;return pe(i)?e.filter((function(t){return pe(t)&&Oe(t,i)&&"body"!==ue(t)})):[]}(t):[].concat(e),o=[].concat(s,[i]),r=o[0],a=o.reduce((function(e,i){var s=ti(t,i,n);return e.top=ve(s.top,e.top),e.right=ye(s.right,e.right),e.bottom=ye(s.bottom,e.bottom),e.left=ve(s.left,e.left),e}),ti(t,r,n));return a.width=a.right-a.left,a.height=a.bottom-a.top,a.x=a.left,a.y=a.top,a}(pe(y)?y:y.contextElement||Le(t.elements.popper),l,h,r),A=Te(t.elements.reference),E=ei({reference:A,element:v,strategy:"absolute",placement:s}),T=Ze(Object.assign({},v,E)),C=u===Jt?T:A,O={top:w.top-C.top+_.top,bottom:C.bottom-w.bottom+_.bottom,left:w.left-C.left+_.left,right:C.right-w.right+_.right},x=t.modifiersData.offset;if(u===Jt&&x){var k=x[s];Object.keys(O).forEach((function(t){var e=[qt,Rt].indexOf(t)>=0?1:-1,i=[zt,Rt].indexOf(t)>=0?"y":"x";O[t]+=k[i]*e}))}return O}function ni(t,e){void 0===e&&(e={});var i=e,n=i.placement,s=i.boundary,o=i.rootBoundary,r=i.padding,a=i.flipVariations,l=i.allowedAutoPlacements,c=void 0===l?ee:l,h=Fe(n),d=h?a?te:te.filter((function(t){return Fe(t)===h})):Qt,u=d.filter((function(t){return c.indexOf(t)>=0}));0===u.length&&(u=d);var f=u.reduce((function(e,i){return e[i]=ii(t,{placement:i,boundary:s,rootBoundary:o,padding:r})[be(i)],e}),{});return Object.keys(f).sort((function(t,e){return f[t]-f[e]}))}const si={name:"flip",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name;if(!e.modifiersData[n]._skip){for(var s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0===r||r,l=i.fallbackPlacements,c=i.padding,h=i.boundary,d=i.rootBoundary,u=i.altBoundary,f=i.flipVariations,p=void 
0===f||f,m=i.allowedAutoPlacements,g=e.options.placement,_=be(g),b=l||(_!==g&&p?function(t){if(be(t)===Kt)return[];var e=Ve(t);return[Qe(t),e,Qe(e)]}(g):[Ve(g)]),v=[g].concat(b).reduce((function(t,i){return t.concat(be(i)===Kt?ni(e,{placement:i,boundary:h,rootBoundary:d,padding:c,flipVariations:p,allowedAutoPlacements:m}):i)}),[]),y=e.rects.reference,w=e.rects.popper,A=new Map,E=!0,T=v[0],C=0;C=0,S=L?"width":"height",D=ii(e,{placement:O,boundary:h,rootBoundary:d,altBoundary:u,padding:c}),$=L?k?qt:Vt:k?Rt:zt;y[S]>w[S]&&($=Ve($));var I=Ve($),N=[];if(o&&N.push(D[x]<=0),a&&N.push(D[$]<=0,D[I]<=0),N.every((function(t){return t}))){T=O,E=!1;break}A.set(O,N)}if(E)for(var P=function(t){var e=v.find((function(e){var i=A.get(e);if(i)return i.slice(0,t).every((function(t){return t}))}));if(e)return T=e,"break"},M=p?3:1;M>0&&"break"!==P(M);M--);e.placement!==T&&(e.modifiersData[n]._skip=!0,e.placement=T,e.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function oi(t,e,i){return void 0===i&&(i={x:0,y:0}),{top:t.top-e.height-i.y,right:t.right-e.width+i.x,bottom:t.bottom-e.height+i.y,left:t.left-e.width-i.x}}function ri(t){return[zt,qt,Rt,Vt].some((function(e){return t[e]>=0}))}const ai={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(t){var e=t.state,i=t.name,n=e.rects.reference,s=e.rects.popper,o=e.modifiersData.preventOverflow,r=ii(e,{elementContext:"reference"}),a=ii(e,{altBoundary:!0}),l=oi(r,n),c=oi(a,s,o),h=ri(l),d=ri(c);e.modifiersData[i]={referenceClippingOffsets:l,popperEscapeOffsets:c,isReferenceHidden:h,hasPopperEscaped:d},e.attributes.popper=Object.assign({},e.attributes.popper,{"data-popper-reference-hidden":h,"data-popper-escaped":d})}},li={name:"offset",enabled:!0,phase:"main",requires:["popperOffsets"],fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.offset,o=void 0===s?[0,0]:s,r=ee.reduce((function(t,i){return t[i]=function(t,e,i){var n=be(t),s=[Vt,zt].indexOf(n)>=0?-1:1,o="function"==typeof 
i?i(Object.assign({},e,{placement:t})):i,r=o[0],a=o[1];return r=r||0,a=(a||0)*s,[Vt,qt].indexOf(n)>=0?{x:a,y:r}:{x:r,y:a}}(i,e.rects,o),t}),{}),a=r[e.placement],l=a.x,c=a.y;null!=e.modifiersData.popperOffsets&&(e.modifiersData.popperOffsets.x+=l,e.modifiersData.popperOffsets.y+=c),e.modifiersData[n]=r}},ci={name:"popperOffsets",enabled:!0,phase:"read",fn:function(t){var e=t.state,i=t.name;e.modifiersData[i]=ei({reference:e.rects.reference,element:e.rects.popper,strategy:"absolute",placement:e.placement})},data:{}},hi={name:"preventOverflow",enabled:!0,phase:"main",fn:function(t){var e=t.state,i=t.options,n=t.name,s=i.mainAxis,o=void 0===s||s,r=i.altAxis,a=void 0!==r&&r,l=i.boundary,c=i.rootBoundary,h=i.altBoundary,d=i.padding,u=i.tether,f=void 0===u||u,p=i.tetherOffset,m=void 0===p?0:p,g=ii(e,{boundary:l,rootBoundary:c,padding:d,altBoundary:h}),_=be(e.placement),b=Fe(e.placement),v=!b,y=Ie(_),w="x"===y?"y":"x",A=e.modifiersData.popperOffsets,E=e.rects.reference,T=e.rects.popper,C="function"==typeof m?m(Object.assign({},e.rects,{placement:e.placement})):m,O="number"==typeof C?{mainAxis:C,altAxis:C}:Object.assign({mainAxis:0,altAxis:0},C),x=e.modifiersData.offset?e.modifiersData.offset[e.placement]:null,k={x:0,y:0};if(A){if(o){var L,S="y"===y?zt:Vt,D="y"===y?Rt:qt,$="y"===y?"height":"width",I=A[y],N=I+g[S],P=I-g[D],M=f?-T[$]/2:0,j=b===Xt?E[$]:T[$],F=b===Xt?-T[$]:-E[$],H=e.elements.arrow,W=f&&H?Ce(H):{width:0,height:0},B=e.modifiersData["arrow#persistent"]?e.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},z=B[S],R=B[D],q=Ne(0,E[$],W[$]),V=v?E[$]/2-M-q-z-O.mainAxis:j-q-z-O.mainAxis,K=v?-E[$]/2+M+q+R+O.mainAxis:F+q+R+O.mainAxis,Q=e.elements.arrow&&$e(e.elements.arrow),X=Q?"y"===y?Q.clientTop||0:Q.clientLeft||0:0,Y=null!=(L=null==x?void 0:x[y])?L:0,U=I+K-Y,G=Ne(f?ye(N,I+V-Y-X):N,I,f?ve(P,U):P);A[y]=G,k[y]=G-I}if(a){var 
J,Z="x"===y?zt:Vt,tt="x"===y?Rt:qt,et=A[w],it="y"===w?"height":"width",nt=et+g[Z],st=et-g[tt],ot=-1!==[zt,Vt].indexOf(_),rt=null!=(J=null==x?void 0:x[w])?J:0,at=ot?nt:et-E[it]-T[it]-rt+O.altAxis,lt=ot?et+E[it]+T[it]-rt-O.altAxis:st,ct=f&&ot?function(t,e,i){var n=Ne(t,e,i);return n>i?i:n}(at,et,lt):Ne(f?at:nt,et,f?lt:st);A[w]=ct,k[w]=ct-et}e.modifiersData[n]=k}},requiresIfExists:["offset"]};function di(t,e,i){void 0===i&&(i=!1);var n,s,o=me(e),r=me(e)&&function(t){var e=t.getBoundingClientRect(),i=we(e.width)/t.offsetWidth||1,n=we(e.height)/t.offsetHeight||1;return 1!==i||1!==n}(e),a=Le(e),l=Te(t,r,i),c={scrollLeft:0,scrollTop:0},h={x:0,y:0};return(o||!o&&!i)&&(("body"!==ue(e)||Ue(a))&&(c=(n=e)!==fe(n)&&me(n)?{scrollLeft:(s=n).scrollLeft,scrollTop:s.scrollTop}:Xe(n)),me(e)?((h=Te(e,!0)).x+=e.clientLeft,h.y+=e.clientTop):a&&(h.x=Ye(a))),{x:l.left+c.scrollLeft-h.x,y:l.top+c.scrollTop-h.y,width:l.width,height:l.height}}function ui(t){var e=new Map,i=new Set,n=[];function s(t){i.add(t.name),[].concat(t.requires||[],t.requiresIfExists||[]).forEach((function(t){if(!i.has(t)){var n=e.get(t);n&&s(n)}})),n.push(t)}return t.forEach((function(t){e.set(t.name,t)})),t.forEach((function(t){i.has(t.name)||s(t)})),n}var fi={placement:"bottom",modifiers:[],strategy:"absolute"};function pi(){for(var t=arguments.length,e=new Array(t),i=0;iNumber.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_getPopperConfig(){const t={placement:this._getPlacement(),modifiers:[{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"offset",options:{offset:this._getOffset()}}]};return(this._inNavbar||"static"===this._config.display)&&(F.setDataAttribute(this._menu,"popper","static"),t.modifiers=[{name:"applyStyles",enabled:!1}]),{...t,...g(this._config.popperConfig,[t])}}_selectMenuItem({key:t,target:e}){const i=z.find(".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",this._menu).filter((t=>a(t)));i.length&&b(i,e,t===Ti,!i.includes(e)).focus()}static 
jQueryInterface(t){return this.each((function(){const e=qi.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}static clearMenus(t){if(2===t.button||"keyup"===t.type&&"Tab"!==t.key)return;const e=z.find(Ni);for(const i of e){const e=qi.getInstance(i);if(!e||!1===e._config.autoClose)continue;const n=t.composedPath(),s=n.includes(e._menu);if(n.includes(e._element)||"inside"===e._config.autoClose&&!s||"outside"===e._config.autoClose&&s)continue;if(e._menu.contains(t.target)&&("keyup"===t.type&&"Tab"===t.key||/input|select|option|textarea|form/i.test(t.target.tagName)))continue;const o={relatedTarget:e._element};"click"===t.type&&(o.clickEvent=t),e._completeHide(o)}}static dataApiKeydownHandler(t){const e=/input|textarea/i.test(t.target.tagName),i="Escape"===t.key,n=[Ei,Ti].includes(t.key);if(!n&&!i)return;if(e&&!i)return;t.preventDefault();const s=this.matches(Ii)?this:z.prev(this,Ii)[0]||z.next(this,Ii)[0]||z.findOne(Ii,t.delegateTarget.parentNode),o=qi.getOrCreateInstance(s);if(n)return t.stopPropagation(),o.show(),void o._selectMenuItem(t);o._isShown()&&(t.stopPropagation(),o.hide(),s.focus())}}N.on(document,Si,Ii,qi.dataApiKeydownHandler),N.on(document,Si,Pi,qi.dataApiKeydownHandler),N.on(document,Li,qi.clearMenus),N.on(document,Di,qi.clearMenus),N.on(document,Li,Ii,(function(t){t.preventDefault(),qi.getOrCreateInstance(this).toggle()})),m(qi);const Vi="backdrop",Ki="show",Qi=`mousedown.bs.${Vi}`,Xi={className:"modal-backdrop",clickCallback:null,isAnimated:!1,isVisible:!0,rootElement:"body"},Yi={className:"string",clickCallback:"(function|null)",isAnimated:"boolean",isVisible:"boolean",rootElement:"(element|string)"};class Ui extends H{constructor(t){super(),this._config=this._getConfig(t),this._isAppended=!1,this._element=null}static get Default(){return Xi}static get DefaultType(){return Yi}static get NAME(){return Vi}show(t){if(!this._config.isVisible)return void g(t);this._append();const 
e=this._getElement();this._config.isAnimated&&d(e),e.classList.add(Ki),this._emulateAnimation((()=>{g(t)}))}hide(t){this._config.isVisible?(this._getElement().classList.remove(Ki),this._emulateAnimation((()=>{this.dispose(),g(t)}))):g(t)}dispose(){this._isAppended&&(N.off(this._element,Qi),this._element.remove(),this._isAppended=!1)}_getElement(){if(!this._element){const t=document.createElement("div");t.className=this._config.className,this._config.isAnimated&&t.classList.add("fade"),this._element=t}return this._element}_configAfterMerge(t){return t.rootElement=r(t.rootElement),t}_append(){if(this._isAppended)return;const t=this._getElement();this._config.rootElement.append(t),N.on(t,Qi,(()=>{g(this._config.clickCallback)})),this._isAppended=!0}_emulateAnimation(t){_(t,this._getElement(),this._config.isAnimated)}}const Gi=".bs.focustrap",Ji=`focusin${Gi}`,Zi=`keydown.tab${Gi}`,tn="backward",en={autofocus:!0,trapElement:null},nn={autofocus:"boolean",trapElement:"element"};class sn extends H{constructor(t){super(),this._config=this._getConfig(t),this._isActive=!1,this._lastTabNavDirection=null}static get Default(){return en}static get DefaultType(){return nn}static get NAME(){return"focustrap"}activate(){this._isActive||(this._config.autofocus&&this._config.trapElement.focus(),N.off(document,Gi),N.on(document,Ji,(t=>this._handleFocusin(t))),N.on(document,Zi,(t=>this._handleKeydown(t))),this._isActive=!0)}deactivate(){this._isActive&&(this._isActive=!1,N.off(document,Gi))}_handleFocusin(t){const{trapElement:e}=this._config;if(t.target===document||t.target===e||e.contains(t.target))return;const i=z.focusableChildren(e);0===i.length?e.focus():this._lastTabNavDirection===tn?i[i.length-1].focus():i[0].focus()}_handleKeydown(t){"Tab"===t.key&&(this._lastTabNavDirection=t.shiftKey?tn:"forward")}}const on=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",rn=".sticky-top",an="padding-right",ln="margin-right";class 
cn{constructor(){this._element=document.body}getWidth(){const t=document.documentElement.clientWidth;return Math.abs(window.innerWidth-t)}hide(){const t=this.getWidth();this._disableOverFlow(),this._setElementAttributes(this._element,an,(e=>e+t)),this._setElementAttributes(on,an,(e=>e+t)),this._setElementAttributes(rn,ln,(e=>e-t))}reset(){this._resetElementAttributes(this._element,"overflow"),this._resetElementAttributes(this._element,an),this._resetElementAttributes(on,an),this._resetElementAttributes(rn,ln)}isOverflowing(){return this.getWidth()>0}_disableOverFlow(){this._saveInitialAttribute(this._element,"overflow"),this._element.style.overflow="hidden"}_setElementAttributes(t,e,i){const n=this.getWidth();this._applyManipulationCallback(t,(t=>{if(t!==this._element&&window.innerWidth>t.clientWidth+n)return;this._saveInitialAttribute(t,e);const s=window.getComputedStyle(t).getPropertyValue(e);t.style.setProperty(e,`${i(Number.parseFloat(s))}px`)}))}_saveInitialAttribute(t,e){const i=t.style.getPropertyValue(e);i&&F.setDataAttribute(t,e,i)}_resetElementAttributes(t,e){this._applyManipulationCallback(t,(t=>{const i=F.getDataAttribute(t,e);null!==i?(F.removeDataAttribute(t,e),t.style.setProperty(e,i)):t.style.removeProperty(e)}))}_applyManipulationCallback(t,e){if(o(t))e(t);else for(const i of z.find(t,this._element))e(i)}}const hn=".bs.modal",dn=`hide${hn}`,un=`hidePrevented${hn}`,fn=`hidden${hn}`,pn=`show${hn}`,mn=`shown${hn}`,gn=`resize${hn}`,_n=`click.dismiss${hn}`,bn=`mousedown.dismiss${hn}`,vn=`keydown.dismiss${hn}`,yn=`click${hn}.data-api`,wn="modal-open",An="show",En="modal-static",Tn={backdrop:!0,focus:!0,keyboard:!0},Cn={backdrop:"(boolean|string)",focus:"boolean",keyboard:"boolean"};class On extends W{constructor(t,e){super(t,e),this._dialog=z.findOne(".modal-dialog",this._element),this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._isShown=!1,this._isTransitioning=!1,this._scrollBar=new 
cn,this._addEventListeners()}static get Default(){return Tn}static get DefaultType(){return Cn}static get NAME(){return"modal"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||this._isTransitioning||N.trigger(this._element,pn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._isTransitioning=!0,this._scrollBar.hide(),document.body.classList.add(wn),this._adjustDialog(),this._backdrop.show((()=>this._showElement(t))))}hide(){this._isShown&&!this._isTransitioning&&(N.trigger(this._element,dn).defaultPrevented||(this._isShown=!1,this._isTransitioning=!0,this._focustrap.deactivate(),this._element.classList.remove(An),this._queueCallback((()=>this._hideModal()),this._element,this._isAnimated())))}dispose(){N.off(window,hn),N.off(this._dialog,hn),this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}handleUpdate(){this._adjustDialog()}_initializeBackDrop(){return new Ui({isVisible:Boolean(this._config.backdrop),isAnimated:this._isAnimated()})}_initializeFocusTrap(){return new sn({trapElement:this._element})}_showElement(t){document.body.contains(this._element)||document.body.append(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.scrollTop=0;const 
e=z.findOne(".modal-body",this._dialog);e&&(e.scrollTop=0),d(this._element),this._element.classList.add(An),this._queueCallback((()=>{this._config.focus&&this._focustrap.activate(),this._isTransitioning=!1,N.trigger(this._element,mn,{relatedTarget:t})}),this._dialog,this._isAnimated())}_addEventListeners(){N.on(this._element,vn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():this._triggerBackdropTransition())})),N.on(window,gn,(()=>{this._isShown&&!this._isTransitioning&&this._adjustDialog()})),N.on(this._element,bn,(t=>{N.one(this._element,_n,(e=>{this._element===t.target&&this._element===e.target&&("static"!==this._config.backdrop?this._config.backdrop&&this.hide():this._triggerBackdropTransition())}))}))}_hideModal(){this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._isTransitioning=!1,this._backdrop.hide((()=>{document.body.classList.remove(wn),this._resetAdjustments(),this._scrollBar.reset(),N.trigger(this._element,fn)}))}_isAnimated(){return this._element.classList.contains("fade")}_triggerBackdropTransition(){if(N.trigger(this._element,un).defaultPrevented)return;const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._element.style.overflowY;"hidden"===e||this._element.classList.contains(En)||(t||(this._element.style.overflowY="hidden"),this._element.classList.add(En),this._queueCallback((()=>{this._element.classList.remove(En),this._queueCallback((()=>{this._element.style.overflowY=e}),this._dialog)}),this._dialog),this._element.focus())}_adjustDialog(){const t=this._element.scrollHeight>document.documentElement.clientHeight,e=this._scrollBar.getWidth(),i=e>0;if(i&&!t){const t=p()?"paddingLeft":"paddingRight";this._element.style[t]=`${e}px`}if(!i&&t){const 
t=p()?"paddingRight":"paddingLeft";this._element.style[t]=`${e}px`}}_resetAdjustments(){this._element.style.paddingLeft="",this._element.style.paddingRight=""}static jQueryInterface(t,e){return this.each((function(){const i=On.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===i[t])throw new TypeError(`No method named "${t}"`);i[t](e)}}))}}N.on(document,yn,'[data-bs-toggle="modal"]',(function(t){const e=z.getElementFromSelector(this);["A","AREA"].includes(this.tagName)&&t.preventDefault(),N.one(e,pn,(t=>{t.defaultPrevented||N.one(e,fn,(()=>{a(this)&&this.focus()}))}));const i=z.findOne(".modal.show");i&&On.getInstance(i).hide(),On.getOrCreateInstance(e).toggle(this)})),R(On),m(On);const xn=".bs.offcanvas",kn=".data-api",Ln=`load${xn}${kn}`,Sn="show",Dn="showing",$n="hiding",In=".offcanvas.show",Nn=`show${xn}`,Pn=`shown${xn}`,Mn=`hide${xn}`,jn=`hidePrevented${xn}`,Fn=`hidden${xn}`,Hn=`resize${xn}`,Wn=`click${xn}${kn}`,Bn=`keydown.dismiss${xn}`,zn={backdrop:!0,keyboard:!0,scroll:!1},Rn={backdrop:"(boolean|string)",keyboard:"boolean",scroll:"boolean"};class qn extends W{constructor(t,e){super(t,e),this._isShown=!1,this._backdrop=this._initializeBackDrop(),this._focustrap=this._initializeFocusTrap(),this._addEventListeners()}static get Default(){return zn}static get DefaultType(){return Rn}static get NAME(){return"offcanvas"}toggle(t){return this._isShown?this.hide():this.show(t)}show(t){this._isShown||N.trigger(this._element,Nn,{relatedTarget:t}).defaultPrevented||(this._isShown=!0,this._backdrop.show(),this._config.scroll||(new 
cn).hide(),this._element.setAttribute("aria-modal",!0),this._element.setAttribute("role","dialog"),this._element.classList.add(Dn),this._queueCallback((()=>{this._config.scroll&&!this._config.backdrop||this._focustrap.activate(),this._element.classList.add(Sn),this._element.classList.remove(Dn),N.trigger(this._element,Pn,{relatedTarget:t})}),this._element,!0))}hide(){this._isShown&&(N.trigger(this._element,Mn).defaultPrevented||(this._focustrap.deactivate(),this._element.blur(),this._isShown=!1,this._element.classList.add($n),this._backdrop.hide(),this._queueCallback((()=>{this._element.classList.remove(Sn,$n),this._element.removeAttribute("aria-modal"),this._element.removeAttribute("role"),this._config.scroll||(new cn).reset(),N.trigger(this._element,Fn)}),this._element,!0)))}dispose(){this._backdrop.dispose(),this._focustrap.deactivate(),super.dispose()}_initializeBackDrop(){const t=Boolean(this._config.backdrop);return new Ui({className:"offcanvas-backdrop",isVisible:t,isAnimated:!0,rootElement:this._element.parentNode,clickCallback:t?()=>{"static"!==this._config.backdrop?this.hide():N.trigger(this._element,jn)}:null})}_initializeFocusTrap(){return new sn({trapElement:this._element})}_addEventListeners(){N.on(this._element,Bn,(t=>{"Escape"===t.key&&(this._config.keyboard?this.hide():N.trigger(this._element,jn))}))}static jQueryInterface(t){return this.each((function(){const e=qn.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}N.on(document,Wn,'[data-bs-toggle="offcanvas"]',(function(t){const e=z.getElementFromSelector(this);if(["A","AREA"].includes(this.tagName)&&t.preventDefault(),l(this))return;N.one(e,Fn,(()=>{a(this)&&this.focus()}));const i=z.findOne(In);i&&i!==e&&qn.getInstance(i).hide(),qn.getOrCreateInstance(e).toggle(this)})),N.on(window,Ln,(()=>{for(const t of 
z.find(In))qn.getOrCreateInstance(t).show()})),N.on(window,Hn,(()=>{for(const t of z.find("[aria-modal][class*=show][class*=offcanvas-]"))"fixed"!==getComputedStyle(t).position&&qn.getOrCreateInstance(t).hide()})),R(qn),m(qn);const Vn={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","srcset","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Kn=new Set(["background","cite","href","itemtype","longdesc","poster","src","xlink:href"]),Qn=/^(?!javascript:)(?:[a-z0-9+.-]+:|[^&:/?#]*(?:[/?#]|$))/i,Xn=(t,e)=>{const i=t.nodeName.toLowerCase();return e.includes(i)?!Kn.has(i)||Boolean(Qn.test(t.nodeValue)):e.filter((t=>t instanceof RegExp)).some((t=>t.test(i)))},Yn={allowList:Vn,content:{},extraClass:"",html:!1,sanitize:!0,sanitizeFn:null,template:"
"},Un={allowList:"object",content:"object",extraClass:"(string|function)",html:"boolean",sanitize:"boolean",sanitizeFn:"(null|function)",template:"string"},Gn={entry:"(string|element|function|null)",selector:"(string|element)"};class Jn extends H{constructor(t){super(),this._config=this._getConfig(t)}static get Default(){return Yn}static get DefaultType(){return Un}static get NAME(){return"TemplateFactory"}getContent(){return Object.values(this._config.content).map((t=>this._resolvePossibleFunction(t))).filter(Boolean)}hasContent(){return this.getContent().length>0}changeContent(t){return this._checkContent(t),this._config.content={...this._config.content,...t},this}toHtml(){const t=document.createElement("div");t.innerHTML=this._maybeSanitize(this._config.template);for(const[e,i]of Object.entries(this._config.content))this._setContent(t,i,e);const e=t.children[0],i=this._resolvePossibleFunction(this._config.extraClass);return i&&e.classList.add(...i.split(" ")),e}_typeCheckConfig(t){super._typeCheckConfig(t),this._checkContent(t.content)}_checkContent(t){for(const[e,i]of Object.entries(t))super._typeCheckConfig({selector:e,entry:i},Gn)}_setContent(t,e,i){const n=z.findOne(i,t);n&&((e=this._resolvePossibleFunction(e))?o(e)?this._putElementInTemplate(r(e),n):this._config.html?n.innerHTML=this._maybeSanitize(e):n.textContent=e:n.remove())}_maybeSanitize(t){return this._config.sanitize?function(t,e,i){if(!t.length)return t;if(i&&"function"==typeof i)return i(t);const n=(new window.DOMParser).parseFromString(t,"text/html"),s=[].concat(...n.body.querySelectorAll("*"));for(const t of s){const i=t.nodeName.toLowerCase();if(!Object.keys(e).includes(i)){t.remove();continue}const n=[].concat(...t.attributes),s=[].concat(e["*"]||[],e[i]||[]);for(const e of n)Xn(e,s)||t.removeAttribute(e.nodeName)}return n.body.innerHTML}(t,this._config.allowList,this._config.sanitizeFn):t}_resolvePossibleFunction(t){return g(t,[this])}_putElementInTemplate(t,e){if(this._config.html)return 
e.innerHTML="",void e.append(t);e.textContent=t.textContent}}const Zn=new Set(["sanitize","allowList","sanitizeFn"]),ts="fade",es="show",is=".modal",ns="hide.bs.modal",ss="hover",os="focus",rs={AUTO:"auto",TOP:"top",RIGHT:p()?"left":"right",BOTTOM:"bottom",LEFT:p()?"right":"left"},as={allowList:Vn,animation:!0,boundary:"clippingParents",container:!1,customClass:"",delay:0,fallbackPlacements:["top","right","bottom","left"],html:!1,offset:[0,6],placement:"top",popperConfig:null,sanitize:!0,sanitizeFn:null,selector:!1,template:'',title:"",trigger:"hover focus"},ls={allowList:"object",animation:"boolean",boundary:"(string|element)",container:"(string|element|boolean)",customClass:"(string|function)",delay:"(number|object)",fallbackPlacements:"array",html:"boolean",offset:"(array|string|function)",placement:"(string|function)",popperConfig:"(null|object|function)",sanitize:"boolean",sanitizeFn:"(null|function)",selector:"(string|boolean)",template:"string",title:"(string|element|function)",trigger:"string"};class cs extends W{constructor(t,e){if(void 0===vi)throw new TypeError("Bootstrap's tooltips require Popper (https://popper.js.org)");super(t,e),this._isEnabled=!0,this._timeout=0,this._isHovered=null,this._activeTrigger={},this._popper=null,this._templateFactory=null,this._newContent=null,this.tip=null,this._setListeners(),this._config.selector||this._fixTitle()}static get Default(){return as}static get DefaultType(){return ls}static get 
NAME(){return"tooltip"}enable(){this._isEnabled=!0}disable(){this._isEnabled=!1}toggleEnabled(){this._isEnabled=!this._isEnabled}toggle(){this._isEnabled&&(this._activeTrigger.click=!this._activeTrigger.click,this._isShown()?this._leave():this._enter())}dispose(){clearTimeout(this._timeout),N.off(this._element.closest(is),ns,this._hideModalHandler),this._element.getAttribute("data-bs-original-title")&&this._element.setAttribute("title",this._element.getAttribute("data-bs-original-title")),this._disposePopper(),super.dispose()}show(){if("none"===this._element.style.display)throw new Error("Please use show on visible elements");if(!this._isWithContent()||!this._isEnabled)return;const t=N.trigger(this._element,this.constructor.eventName("show")),e=(c(this._element)||this._element.ownerDocument.documentElement).contains(this._element);if(t.defaultPrevented||!e)return;this._disposePopper();const i=this._getTipElement();this._element.setAttribute("aria-describedby",i.getAttribute("id"));const{container:n}=this._config;if(this._element.ownerDocument.documentElement.contains(this.tip)||(n.append(i),N.trigger(this._element,this.constructor.eventName("inserted"))),this._popper=this._createPopper(i),i.classList.add(es),"ontouchstart"in document.documentElement)for(const t of[].concat(...document.body.children))N.on(t,"mouseover",h);this._queueCallback((()=>{N.trigger(this._element,this.constructor.eventName("shown")),!1===this._isHovered&&this._leave(),this._isHovered=!1}),this.tip,this._isAnimated())}hide(){if(this._isShown()&&!N.trigger(this._element,this.constructor.eventName("hide")).defaultPrevented){if(this._getTipElement().classList.remove(es),"ontouchstart"in document.documentElement)for(const t 
of[].concat(...document.body.children))N.off(t,"mouseover",h);this._activeTrigger.click=!1,this._activeTrigger[os]=!1,this._activeTrigger[ss]=!1,this._isHovered=null,this._queueCallback((()=>{this._isWithActiveTrigger()||(this._isHovered||this._disposePopper(),this._element.removeAttribute("aria-describedby"),N.trigger(this._element,this.constructor.eventName("hidden")))}),this.tip,this._isAnimated())}}update(){this._popper&&this._popper.update()}_isWithContent(){return Boolean(this._getTitle())}_getTipElement(){return this.tip||(this.tip=this._createTipElement(this._newContent||this._getContentForTemplate())),this.tip}_createTipElement(t){const e=this._getTemplateFactory(t).toHtml();if(!e)return null;e.classList.remove(ts,es),e.classList.add(`bs-${this.constructor.NAME}-auto`);const i=(t=>{do{t+=Math.floor(1e6*Math.random())}while(document.getElementById(t));return t})(this.constructor.NAME).toString();return e.setAttribute("id",i),this._isAnimated()&&e.classList.add(ts),e}setContent(t){this._newContent=t,this._isShown()&&(this._disposePopper(),this.show())}_getTemplateFactory(t){return this._templateFactory?this._templateFactory.changeContent(t):this._templateFactory=new Jn({...this._config,content:t,extraClass:this._resolvePossibleFunction(this._config.customClass)}),this._templateFactory}_getContentForTemplate(){return{".tooltip-inner":this._getTitle()}}_getTitle(){return this._resolvePossibleFunction(this._config.title)||this._element.getAttribute("data-bs-original-title")}_initializeOnDelegatedTarget(t){return this.constructor.getOrCreateInstance(t.delegateTarget,this._getDelegateConfig())}_isAnimated(){return this._config.animation||this.tip&&this.tip.classList.contains(ts)}_isShown(){return this.tip&&this.tip.classList.contains(es)}_createPopper(t){const e=g(this._config.placement,[this,t,this._element]),i=rs[e.toUpperCase()];return bi(this._element,t,this._getPopperConfig(i))}_getOffset(){const{offset:t}=this._config;return"string"==typeof 
t?t.split(",").map((t=>Number.parseInt(t,10))):"function"==typeof t?e=>t(e,this._element):t}_resolvePossibleFunction(t){return g(t,[this._element])}_getPopperConfig(t){const e={placement:t,modifiers:[{name:"flip",options:{fallbackPlacements:this._config.fallbackPlacements}},{name:"offset",options:{offset:this._getOffset()}},{name:"preventOverflow",options:{boundary:this._config.boundary}},{name:"arrow",options:{element:`.${this.constructor.NAME}-arrow`}},{name:"preSetPlacement",enabled:!0,phase:"beforeMain",fn:t=>{this._getTipElement().setAttribute("data-popper-placement",t.state.placement)}}]};return{...e,...g(this._config.popperConfig,[e])}}_setListeners(){const t=this._config.trigger.split(" ");for(const e of t)if("click"===e)N.on(this._element,this.constructor.eventName("click"),this._config.selector,(t=>{this._initializeOnDelegatedTarget(t).toggle()}));else if("manual"!==e){const t=e===ss?this.constructor.eventName("mouseenter"):this.constructor.eventName("focusin"),i=e===ss?this.constructor.eventName("mouseleave"):this.constructor.eventName("focusout");N.on(this._element,t,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusin"===t.type?os:ss]=!0,e._enter()})),N.on(this._element,i,this._config.selector,(t=>{const e=this._initializeOnDelegatedTarget(t);e._activeTrigger["focusout"===t.type?os:ss]=e._element.contains(t.relatedTarget),e._leave()}))}this._hideModalHandler=()=>{this._element&&this.hide()},N.on(this._element.closest(is),ns,this._hideModalHandler)}_fixTitle(){const 
t=this._element.getAttribute("title");t&&(this._element.getAttribute("aria-label")||this._element.textContent.trim()||this._element.setAttribute("aria-label",t),this._element.setAttribute("data-bs-original-title",t),this._element.removeAttribute("title"))}_enter(){this._isShown()||this._isHovered?this._isHovered=!0:(this._isHovered=!0,this._setTimeout((()=>{this._isHovered&&this.show()}),this._config.delay.show))}_leave(){this._isWithActiveTrigger()||(this._isHovered=!1,this._setTimeout((()=>{this._isHovered||this.hide()}),this._config.delay.hide))}_setTimeout(t,e){clearTimeout(this._timeout),this._timeout=setTimeout(t,e)}_isWithActiveTrigger(){return Object.values(this._activeTrigger).includes(!0)}_getConfig(t){const e=F.getDataAttributes(this._element);for(const t of Object.keys(e))Zn.has(t)&&delete e[t];return t={...e,..."object"==typeof t&&t?t:{}},t=this._mergeConfigObj(t),t=this._configAfterMerge(t),this._typeCheckConfig(t),t}_configAfterMerge(t){return t.container=!1===t.container?document.body:r(t.container),"number"==typeof t.delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),t}_getDelegateConfig(){const t={};for(const[e,i]of Object.entries(this._config))this.constructor.Default[e]!==i&&(t[e]=i);return t.selector=!1,t.trigger="manual",t}_disposePopper(){this._popper&&(this._popper.destroy(),this._popper=null),this.tip&&(this.tip.remove(),this.tip=null)}static jQueryInterface(t){return this.each((function(){const e=cs.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}m(cs);const hs={...cs.Default,content:"",offset:[0,8],placement:"right",template:'',trigger:"click"},ds={...cs.DefaultType,content:"(null|string|element|function)"};class us extends cs{static get Default(){return hs}static get DefaultType(){return ds}static get NAME(){return"popover"}_isWithContent(){return 
this._getTitle()||this._getContent()}_getContentForTemplate(){return{".popover-header":this._getTitle(),".popover-body":this._getContent()}}_getContent(){return this._resolvePossibleFunction(this._config.content)}static jQueryInterface(t){return this.each((function(){const e=us.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t]()}}))}}m(us);const fs=".bs.scrollspy",ps=`activate${fs}`,ms=`click${fs}`,gs=`load${fs}.data-api`,_s="active",bs="[href]",vs=".nav-link",ys=`${vs}, .nav-item > ${vs}, .list-group-item`,ws={offset:null,rootMargin:"0px 0px -25%",smoothScroll:!1,target:null,threshold:[.1,.5,1]},As={offset:"(number|null)",rootMargin:"string",smoothScroll:"boolean",target:"element",threshold:"array"};class Es extends W{constructor(t,e){super(t,e),this._targetLinks=new Map,this._observableSections=new Map,this._rootElement="visible"===getComputedStyle(this._element).overflowY?null:this._element,this._activeTarget=null,this._observer=null,this._previousScrollData={visibleEntryTop:0,parentScrollTop:0},this.refresh()}static get Default(){return ws}static get DefaultType(){return As}static get NAME(){return"scrollspy"}refresh(){this._initializeTargetsAndObservables(),this._maybeEnableSmoothScroll(),this._observer?this._observer.disconnect():this._observer=this._getNewObserver();for(const t of this._observableSections.values())this._observer.observe(t)}dispose(){this._observer.disconnect(),super.dispose()}_configAfterMerge(t){return t.target=r(t.target)||document.body,t.rootMargin=t.offset?`${t.offset}px 0px -30%`:t.rootMargin,"string"==typeof t.threshold&&(t.threshold=t.threshold.split(",").map((t=>Number.parseFloat(t)))),t}_maybeEnableSmoothScroll(){this._config.smoothScroll&&(N.off(this._config.target,ms),N.on(this._config.target,ms,bs,(t=>{const e=this._observableSections.get(t.target.hash);if(e){t.preventDefault();const 
i=this._rootElement||window,n=e.offsetTop-this._element.offsetTop;if(i.scrollTo)return void i.scrollTo({top:n,behavior:"smooth"});i.scrollTop=n}})))}_getNewObserver(){const t={root:this._rootElement,threshold:this._config.threshold,rootMargin:this._config.rootMargin};return new IntersectionObserver((t=>this._observerCallback(t)),t)}_observerCallback(t){const e=t=>this._targetLinks.get(`#${t.target.id}`),i=t=>{this._previousScrollData.visibleEntryTop=t.target.offsetTop,this._process(e(t))},n=(this._rootElement||document.documentElement).scrollTop,s=n>=this._previousScrollData.parentScrollTop;this._previousScrollData.parentScrollTop=n;for(const o of t){if(!o.isIntersecting){this._activeTarget=null,this._clearActiveClass(e(o));continue}const t=o.target.offsetTop>=this._previousScrollData.visibleEntryTop;if(s&&t){if(i(o),!n)return}else s||t||i(o)}}_initializeTargetsAndObservables(){this._targetLinks=new Map,this._observableSections=new Map;const t=z.find(bs,this._config.target);for(const e of t){if(!e.hash||l(e))continue;const t=z.findOne(decodeURI(e.hash),this._element);a(t)&&(this._targetLinks.set(decodeURI(e.hash),e),this._observableSections.set(e.hash,t))}}_process(t){this._activeTarget!==t&&(this._clearActiveClass(this._config.target),this._activeTarget=t,t.classList.add(_s),this._activateParents(t),N.trigger(this._element,ps,{relatedTarget:t}))}_activateParents(t){if(t.classList.contains("dropdown-item"))z.findOne(".dropdown-toggle",t.closest(".dropdown")).classList.add(_s);else for(const e of z.parents(t,".nav, .list-group"))for(const t of z.prev(e,ys))t.classList.add(_s)}_clearActiveClass(t){t.classList.remove(_s);const e=z.find(`${bs}.${_s}`,t);for(const t of e)t.classList.remove(_s)}static jQueryInterface(t){return this.each((function(){const e=Es.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}N.on(window,gs,(()=>{for(const t of 
z.find('[data-bs-spy="scroll"]'))Es.getOrCreateInstance(t)})),m(Es);const Ts=".bs.tab",Cs=`hide${Ts}`,Os=`hidden${Ts}`,xs=`show${Ts}`,ks=`shown${Ts}`,Ls=`click${Ts}`,Ss=`keydown${Ts}`,Ds=`load${Ts}`,$s="ArrowLeft",Is="ArrowRight",Ns="ArrowUp",Ps="ArrowDown",Ms="Home",js="End",Fs="active",Hs="fade",Ws="show",Bs=":not(.dropdown-toggle)",zs='[data-bs-toggle="tab"], [data-bs-toggle="pill"], [data-bs-toggle="list"]',Rs=`.nav-link${Bs}, .list-group-item${Bs}, [role="tab"]${Bs}, ${zs}`,qs=`.${Fs}[data-bs-toggle="tab"], .${Fs}[data-bs-toggle="pill"], .${Fs}[data-bs-toggle="list"]`;class Vs extends W{constructor(t){super(t),this._parent=this._element.closest('.list-group, .nav, [role="tablist"]'),this._parent&&(this._setInitialAttributes(this._parent,this._getChildren()),N.on(this._element,Ss,(t=>this._keydown(t))))}static get NAME(){return"tab"}show(){const t=this._element;if(this._elemIsActive(t))return;const e=this._getActiveElem(),i=e?N.trigger(e,Cs,{relatedTarget:t}):null;N.trigger(t,xs,{relatedTarget:e}).defaultPrevented||i&&i.defaultPrevented||(this._deactivate(e,t),this._activate(t,e))}_activate(t,e){t&&(t.classList.add(Fs),this._activate(z.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.removeAttribute("tabindex"),t.setAttribute("aria-selected",!0),this._toggleDropDown(t,!0),N.trigger(t,ks,{relatedTarget:e})):t.classList.add(Ws)}),t,t.classList.contains(Hs)))}_deactivate(t,e){t&&(t.classList.remove(Fs),t.blur(),this._deactivate(z.getElementFromSelector(t)),this._queueCallback((()=>{"tab"===t.getAttribute("role")?(t.setAttribute("aria-selected",!1),t.setAttribute("tabindex","-1"),this._toggleDropDown(t,!1),N.trigger(t,Os,{relatedTarget:e})):t.classList.remove(Ws)}),t,t.classList.contains(Hs)))}_keydown(t){if(![$s,Is,Ns,Ps,Ms,js].includes(t.key))return;t.stopPropagation(),t.preventDefault();const e=this._getChildren().filter((t=>!l(t)));let i;if([Ms,js].includes(t.key))i=e[t.key===Ms?0:e.length-1];else{const 
n=[Is,Ps].includes(t.key);i=b(e,t.target,n,!0)}i&&(i.focus({preventScroll:!0}),Vs.getOrCreateInstance(i).show())}_getChildren(){return z.find(Rs,this._parent)}_getActiveElem(){return this._getChildren().find((t=>this._elemIsActive(t)))||null}_setInitialAttributes(t,e){this._setAttributeIfNotExists(t,"role","tablist");for(const t of e)this._setInitialAttributesOnChild(t)}_setInitialAttributesOnChild(t){t=this._getInnerElement(t);const e=this._elemIsActive(t),i=this._getOuterElement(t);t.setAttribute("aria-selected",e),i!==t&&this._setAttributeIfNotExists(i,"role","presentation"),e||t.setAttribute("tabindex","-1"),this._setAttributeIfNotExists(t,"role","tab"),this._setInitialAttributesOnTargetPanel(t)}_setInitialAttributesOnTargetPanel(t){const e=z.getElementFromSelector(t);e&&(this._setAttributeIfNotExists(e,"role","tabpanel"),t.id&&this._setAttributeIfNotExists(e,"aria-labelledby",`${t.id}`))}_toggleDropDown(t,e){const i=this._getOuterElement(t);if(!i.classList.contains("dropdown"))return;const n=(t,n)=>{const s=z.findOne(t,i);s&&s.classList.toggle(n,e)};n(".dropdown-toggle",Fs),n(".dropdown-menu",Ws),i.setAttribute("aria-expanded",e)}_setAttributeIfNotExists(t,e,i){t.hasAttribute(e)||t.setAttribute(e,i)}_elemIsActive(t){return t.classList.contains(Fs)}_getInnerElement(t){return t.matches(Rs)?t:z.findOne(Rs,t)}_getOuterElement(t){return t.closest(".nav-item, .list-group-item")||t}static jQueryInterface(t){return this.each((function(){const e=Vs.getOrCreateInstance(this);if("string"==typeof t){if(void 0===e[t]||t.startsWith("_")||"constructor"===t)throw new TypeError(`No method named "${t}"`);e[t]()}}))}}N.on(document,Ls,zs,(function(t){["A","AREA"].includes(this.tagName)&&t.preventDefault(),l(this)||Vs.getOrCreateInstance(this).show()})),N.on(window,Ds,(()=>{for(const t of z.find(qs))Vs.getOrCreateInstance(t)})),m(Vs);const 
Ks=".bs.toast",Qs=`mouseover${Ks}`,Xs=`mouseout${Ks}`,Ys=`focusin${Ks}`,Us=`focusout${Ks}`,Gs=`hide${Ks}`,Js=`hidden${Ks}`,Zs=`show${Ks}`,to=`shown${Ks}`,eo="hide",io="show",no="showing",so={animation:"boolean",autohide:"boolean",delay:"number"},oo={animation:!0,autohide:!0,delay:5e3};class ro extends W{constructor(t,e){super(t,e),this._timeout=null,this._hasMouseInteraction=!1,this._hasKeyboardInteraction=!1,this._setListeners()}static get Default(){return oo}static get DefaultType(){return so}static get NAME(){return"toast"}show(){N.trigger(this._element,Zs).defaultPrevented||(this._clearTimeout(),this._config.animation&&this._element.classList.add("fade"),this._element.classList.remove(eo),d(this._element),this._element.classList.add(io,no),this._queueCallback((()=>{this._element.classList.remove(no),N.trigger(this._element,to),this._maybeScheduleHide()}),this._element,this._config.animation))}hide(){this.isShown()&&(N.trigger(this._element,Gs).defaultPrevented||(this._element.classList.add(no),this._queueCallback((()=>{this._element.classList.add(eo),this._element.classList.remove(no,io),N.trigger(this._element,Js)}),this._element,this._config.animation)))}dispose(){this._clearTimeout(),this.isShown()&&this._element.classList.remove(io),super.dispose()}isShown(){return this._element.classList.contains(io)}_maybeScheduleHide(){this._config.autohide&&(this._hasMouseInteraction||this._hasKeyboardInteraction||(this._timeout=setTimeout((()=>{this.hide()}),this._config.delay)))}_onInteraction(t,e){switch(t.type){case"mouseover":case"mouseout":this._hasMouseInteraction=e;break;case"focusin":case"focusout":this._hasKeyboardInteraction=e}if(e)return void this._clearTimeout();const 
i=t.relatedTarget;this._element===i||this._element.contains(i)||this._maybeScheduleHide()}_setListeners(){N.on(this._element,Qs,(t=>this._onInteraction(t,!0))),N.on(this._element,Xs,(t=>this._onInteraction(t,!1))),N.on(this._element,Ys,(t=>this._onInteraction(t,!0))),N.on(this._element,Us,(t=>this._onInteraction(t,!1)))}_clearTimeout(){clearTimeout(this._timeout),this._timeout=null}static jQueryInterface(t){return this.each((function(){const e=ro.getOrCreateInstance(this,t);if("string"==typeof t){if(void 0===e[t])throw new TypeError(`No method named "${t}"`);e[t](this)}}))}}return R(ro),m(ro),{Alert:Q,Button:Y,Carousel:xt,Collapse:Bt,Dropdown:qi,Modal:On,Offcanvas:qn,Popover:us,ScrollSpy:Es,Tab:Vs,Toast:ro,Tooltip:cs}})); +//# sourceMappingURL=bootstrap.bundle.min.js.map \ No newline at end of file diff --git a/site_libs/clipboard/clipboard.min.js b/site_libs/clipboard/clipboard.min.js new file mode 100644 index 0000000..1103f81 --- /dev/null +++ b/site_libs/clipboard/clipboard.min.js @@ -0,0 +1,7 @@ +/*! 
+ * clipboard.js v2.0.11 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return b}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),r=n.n(e);function c(t){try{return document.execCommand(t)}catch(t){return}}var a=function(t){t=r()(t);return c("cut"),t};function o(t,e){var n,o,t=(n=t,o="rtl"===document.documentElement.getAttribute("dir"),(t=document.createElement("textarea")).style.fontSize="12pt",t.style.border="0",t.style.padding="0",t.style.margin="0",t.style.position="absolute",t.style[o?"right":"left"]="-9999px",o=window.pageYOffset||document.documentElement.scrollTop,t.style.top="".concat(o,"px"),t.setAttribute("readonly",""),t.value=n,t);return e.container.appendChild(t),e=r()(t),c("copy"),t.remove(),e}var f=function(t){var 
e=1.anchorjs-link,.anchorjs-link:focus{opacity:1}",A.sheet.cssRules.length),A.sheet.insertRule("[data-anchorjs-icon]::after{content:attr(data-anchorjs-icon)}",A.sheet.cssRules.length),A.sheet.insertRule('@font-face{font-family:anchorjs-icons;src:url(data:n/a;base64,AAEAAAALAIAAAwAwT1MvMg8yG2cAAAE4AAAAYGNtYXDp3gC3AAABpAAAAExnYXNwAAAAEAAAA9wAAAAIZ2x5ZlQCcfwAAAH4AAABCGhlYWQHFvHyAAAAvAAAADZoaGVhBnACFwAAAPQAAAAkaG10eASAADEAAAGYAAAADGxvY2EACACEAAAB8AAAAAhtYXhwAAYAVwAAARgAAAAgbmFtZQGOH9cAAAMAAAAAunBvc3QAAwAAAAADvAAAACAAAQAAAAEAAHzE2p9fDzz1AAkEAAAAAADRecUWAAAAANQA6R8AAAAAAoACwAAAAAgAAgAAAAAAAAABAAADwP/AAAACgAAA/9MCrQABAAAAAAAAAAAAAAAAAAAAAwABAAAAAwBVAAIAAAAAAAIAAAAAAAAAAAAAAAAAAAAAAAMCQAGQAAUAAAKZAswAAACPApkCzAAAAesAMwEJAAAAAAAAAAAAAAAAAAAAARAAAAAAAAAAAAAAAAAAAAAAQAAg//0DwP/AAEADwABAAAAAAQAAAAAAAAAAAAAAIAAAAAAAAAIAAAACgAAxAAAAAwAAAAMAAAAcAAEAAwAAABwAAwABAAAAHAAEADAAAAAIAAgAAgAAACDpy//9//8AAAAg6cv//f///+EWNwADAAEAAAAAAAAAAAAAAAAACACEAAEAAAAAAAAAAAAAAAAxAAACAAQARAKAAsAAKwBUAAABIiYnJjQ3NzY2MzIWFxYUBwcGIicmNDc3NjQnJiYjIgYHBwYUFxYUBwYGIwciJicmNDc3NjIXFhQHBwYUFxYWMzI2Nzc2NCcmNDc2MhcWFAcHBgYjARQGDAUtLXoWOR8fORYtLTgKGwoKCjgaGg0gEhIgDXoaGgkJBQwHdR85Fi0tOAobCgoKOBoaDSASEiANehoaCQkKGwotLXoWOR8BMwUFLYEuehYXFxYugC44CQkKGwo4GkoaDQ0NDXoaShoKGwoFBe8XFi6ALjgJCQobCjgaShoNDQ0NehpKGgobCgoKLYEuehYXAAAADACWAAEAAAAAAAEACAAAAAEAAAAAAAIAAwAIAAEAAAAAAAMACAAAAAEAAAAAAAQACAAAAAEAAAAAAAUAAQALAAEAAAAAAAYACAAAAAMAAQQJAAEAEAAMAAMAAQQJAAIABgAcAAMAAQQJAAMAEAAMAAMAAQQJAAQAEAAMAAMAAQQJAAUAAgAiAAMAAQQJAAYAEAAMYW5jaG9yanM0MDBAAGEAbgBjAGgAbwByAGoAcwA0ADAAMABAAAAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAH//wAP) format("truetype")}',A.sheet.cssRules.length)),h=document.querySelectorAll("[id]"),t=[].map.call(h,function(A){return A.id}),i=0;i\]./()*\\\n\t\b\v\u00A0]/g,"-").replace(/-{2,}/g,"-").substring(0,this.options.truncate).replace(/^-+|-+$/gm,"").toLowerCase()},this.hasAnchorJSLink=function(A){var e=A.firstChild&&-1<(" "+A.firstChild.className+" ").indexOf(" anchorjs-link "),A=A.lastChild&&-1<(" 
"+A.lastChild.className+" ").indexOf(" anchorjs-link ");return e||A||!1}}}); +// @license-end \ No newline at end of file diff --git a/site_libs/quarto-html/popper.min.js b/site_libs/quarto-html/popper.min.js new file mode 100644 index 0000000..e3726d7 --- /dev/null +++ b/site_libs/quarto-html/popper.min.js @@ -0,0 +1,6 @@ +/** + * @popperjs/core v2.11.7 - MIT License + */ + +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).Popper={})}(this,(function(e){"use strict";function t(e){if(null==e)return window;if("[object Window]"!==e.toString()){var t=e.ownerDocument;return t&&t.defaultView||window}return e}function n(e){return e instanceof t(e).Element||e instanceof Element}function r(e){return e instanceof t(e).HTMLElement||e instanceof HTMLElement}function o(e){return"undefined"!=typeof ShadowRoot&&(e instanceof t(e).ShadowRoot||e instanceof ShadowRoot)}var i=Math.max,a=Math.min,s=Math.round;function f(){var e=navigator.userAgentData;return null!=e&&e.brands&&Array.isArray(e.brands)?e.brands.map((function(e){return e.brand+"/"+e.version})).join(" "):navigator.userAgent}function c(){return!/^((?!chrome|android).)*safari/i.test(f())}function p(e,o,i){void 0===o&&(o=!1),void 0===i&&(i=!1);var a=e.getBoundingClientRect(),f=1,p=1;o&&r(e)&&(f=e.offsetWidth>0&&s(a.width)/e.offsetWidth||1,p=e.offsetHeight>0&&s(a.height)/e.offsetHeight||1);var u=(n(e)?t(e):window).visualViewport,l=!c()&&i,d=(a.left+(l&&u?u.offsetLeft:0))/f,h=(a.top+(l&&u?u.offsetTop:0))/p,m=a.width/f,v=a.height/p;return{width:m,height:v,top:h,right:d+m,bottom:h+v,left:d,x:d,y:h}}function u(e){var n=t(e);return{scrollLeft:n.pageXOffset,scrollTop:n.pageYOffset}}function l(e){return e?(e.nodeName||"").toLowerCase():null}function d(e){return((n(e)?e.ownerDocument:e.document)||window.document).documentElement}function h(e){return p(d(e)).left+u(e).scrollLeft}function 
m(e){return t(e).getComputedStyle(e)}function v(e){var t=m(e),n=t.overflow,r=t.overflowX,o=t.overflowY;return/auto|scroll|overlay|hidden/.test(n+o+r)}function y(e,n,o){void 0===o&&(o=!1);var i,a,f=r(n),c=r(n)&&function(e){var t=e.getBoundingClientRect(),n=s(t.width)/e.offsetWidth||1,r=s(t.height)/e.offsetHeight||1;return 1!==n||1!==r}(n),m=d(n),y=p(e,c,o),g={scrollLeft:0,scrollTop:0},b={x:0,y:0};return(f||!f&&!o)&&(("body"!==l(n)||v(m))&&(g=(i=n)!==t(i)&&r(i)?{scrollLeft:(a=i).scrollLeft,scrollTop:a.scrollTop}:u(i)),r(n)?((b=p(n,!0)).x+=n.clientLeft,b.y+=n.clientTop):m&&(b.x=h(m))),{x:y.left+g.scrollLeft-b.x,y:y.top+g.scrollTop-b.y,width:y.width,height:y.height}}function g(e){var t=p(e),n=e.offsetWidth,r=e.offsetHeight;return Math.abs(t.width-n)<=1&&(n=t.width),Math.abs(t.height-r)<=1&&(r=t.height),{x:e.offsetLeft,y:e.offsetTop,width:n,height:r}}function b(e){return"html"===l(e)?e:e.assignedSlot||e.parentNode||(o(e)?e.host:null)||d(e)}function x(e){return["html","body","#document"].indexOf(l(e))>=0?e.ownerDocument.body:r(e)&&v(e)?e:x(b(e))}function w(e,n){var r;void 0===n&&(n=[]);var o=x(e),i=o===(null==(r=e.ownerDocument)?void 0:r.body),a=t(o),s=i?[a].concat(a.visualViewport||[],v(o)?o:[]):o,f=n.concat(s);return i?f:f.concat(w(b(s)))}function O(e){return["table","td","th"].indexOf(l(e))>=0}function j(e){return r(e)&&"fixed"!==m(e).position?e.offsetParent:null}function E(e){for(var n=t(e),i=j(e);i&&O(i)&&"static"===m(i).position;)i=j(i);return i&&("html"===l(i)||"body"===l(i)&&"static"===m(i).position)?n:i||function(e){var t=/firefox/i.test(f());if(/Trident/i.test(f())&&r(e)&&"fixed"===m(e).position)return null;var n=b(e);for(o(n)&&(n=n.host);r(n)&&["html","body"].indexOf(l(n))<0;){var i=m(n);if("none"!==i.transform||"none"!==i.perspective||"paint"===i.contain||-1!==["transform","perspective"].indexOf(i.willChange)||t&&"filter"===i.willChange||t&&i.filter&&"none"!==i.filter)return n;n=n.parentNode}return null}(e)||n}var 
D="top",A="bottom",L="right",P="left",M="auto",k=[D,A,L,P],W="start",B="end",H="viewport",T="popper",R=k.reduce((function(e,t){return e.concat([t+"-"+W,t+"-"+B])}),[]),S=[].concat(k,[M]).reduce((function(e,t){return e.concat([t,t+"-"+W,t+"-"+B])}),[]),V=["beforeRead","read","afterRead","beforeMain","main","afterMain","beforeWrite","write","afterWrite"];function q(e){var t=new Map,n=new Set,r=[];function o(e){n.add(e.name),[].concat(e.requires||[],e.requiresIfExists||[]).forEach((function(e){if(!n.has(e)){var r=t.get(e);r&&o(r)}})),r.push(e)}return e.forEach((function(e){t.set(e.name,e)})),e.forEach((function(e){n.has(e.name)||o(e)})),r}function C(e){return e.split("-")[0]}function N(e,t){var n=t.getRootNode&&t.getRootNode();if(e.contains(t))return!0;if(n&&o(n)){var r=t;do{if(r&&e.isSameNode(r))return!0;r=r.parentNode||r.host}while(r)}return!1}function I(e){return Object.assign({},e,{left:e.x,top:e.y,right:e.x+e.width,bottom:e.y+e.height})}function _(e,r,o){return r===H?I(function(e,n){var r=t(e),o=d(e),i=r.visualViewport,a=o.clientWidth,s=o.clientHeight,f=0,p=0;if(i){a=i.width,s=i.height;var u=c();(u||!u&&"fixed"===n)&&(f=i.offsetLeft,p=i.offsetTop)}return{width:a,height:s,x:f+h(e),y:p}}(e,o)):n(r)?function(e,t){var n=p(e,!1,"fixed"===t);return n.top=n.top+e.clientTop,n.left=n.left+e.clientLeft,n.bottom=n.top+e.clientHeight,n.right=n.left+e.clientWidth,n.width=e.clientWidth,n.height=e.clientHeight,n.x=n.left,n.y=n.top,n}(r,o):I(function(e){var t,n=d(e),r=u(e),o=null==(t=e.ownerDocument)?void 0:t.body,a=i(n.scrollWidth,n.clientWidth,o?o.scrollWidth:0,o?o.clientWidth:0),s=i(n.scrollHeight,n.clientHeight,o?o.scrollHeight:0,o?o.clientHeight:0),f=-r.scrollLeft+h(e),c=-r.scrollTop;return"rtl"===m(o||n).direction&&(f+=i(n.clientWidth,o?o.clientWidth:0)-a),{width:a,height:s,x:f,y:c}}(d(e)))}function F(e,t,o,s){var f="clippingParents"===t?function(e){var t=w(b(e)),o=["absolute","fixed"].indexOf(m(e).position)>=0&&r(e)?E(e):e;return n(o)?t.filter((function(e){return 
n(e)&&N(e,o)&&"body"!==l(e)})):[]}(e):[].concat(t),c=[].concat(f,[o]),p=c[0],u=c.reduce((function(t,n){var r=_(e,n,s);return t.top=i(r.top,t.top),t.right=a(r.right,t.right),t.bottom=a(r.bottom,t.bottom),t.left=i(r.left,t.left),t}),_(e,p,s));return u.width=u.right-u.left,u.height=u.bottom-u.top,u.x=u.left,u.y=u.top,u}function U(e){return e.split("-")[1]}function z(e){return["top","bottom"].indexOf(e)>=0?"x":"y"}function X(e){var t,n=e.reference,r=e.element,o=e.placement,i=o?C(o):null,a=o?U(o):null,s=n.x+n.width/2-r.width/2,f=n.y+n.height/2-r.height/2;switch(i){case D:t={x:s,y:n.y-r.height};break;case A:t={x:s,y:n.y+n.height};break;case L:t={x:n.x+n.width,y:f};break;case P:t={x:n.x-r.width,y:f};break;default:t={x:n.x,y:n.y}}var c=i?z(i):null;if(null!=c){var p="y"===c?"height":"width";switch(a){case W:t[c]=t[c]-(n[p]/2-r[p]/2);break;case B:t[c]=t[c]+(n[p]/2-r[p]/2)}}return t}function Y(e){return Object.assign({},{top:0,right:0,bottom:0,left:0},e)}function G(e,t){return t.reduce((function(t,n){return t[n]=e,t}),{})}function J(e,t){void 0===t&&(t={});var r=t,o=r.placement,i=void 0===o?e.placement:o,a=r.strategy,s=void 0===a?e.strategy:a,f=r.boundary,c=void 0===f?"clippingParents":f,u=r.rootBoundary,l=void 0===u?H:u,h=r.elementContext,m=void 0===h?T:h,v=r.altBoundary,y=void 0!==v&&v,g=r.padding,b=void 0===g?0:g,x=Y("number"!=typeof b?b:G(b,k)),w=m===T?"reference":T,O=e.rects.popper,j=e.elements[y?w:m],E=F(n(j)?j:j.contextElement||d(e.elements.popper),c,l,s),P=p(e.elements.reference),M=X({reference:P,element:O,strategy:"absolute",placement:i}),W=I(Object.assign({},O,M)),B=m===T?W:P,R={top:E.top-B.top+x.top,bottom:B.bottom-E.bottom+x.bottom,left:E.left-B.left+x.left,right:B.right-E.right+x.right},S=e.modifiersData.offset;if(m===T&&S){var V=S[i];Object.keys(R).forEach((function(e){var t=[L,A].indexOf(e)>=0?1:-1,n=[D,A].indexOf(e)>=0?"y":"x";R[e]+=V[n]*t}))}return R}var K={placement:"bottom",modifiers:[],strategy:"absolute"};function Q(){for(var e=arguments.length,t=new 
Array(e),n=0;n=0?-1:1,i="function"==typeof n?n(Object.assign({},t,{placement:e})):n,a=i[0],s=i[1];return a=a||0,s=(s||0)*o,[P,L].indexOf(r)>=0?{x:s,y:a}:{x:a,y:s}}(n,t.rects,i),e}),{}),s=a[t.placement],f=s.x,c=s.y;null!=t.modifiersData.popperOffsets&&(t.modifiersData.popperOffsets.x+=f,t.modifiersData.popperOffsets.y+=c),t.modifiersData[r]=a}},se={left:"right",right:"left",bottom:"top",top:"bottom"};function fe(e){return e.replace(/left|right|bottom|top/g,(function(e){return se[e]}))}var ce={start:"end",end:"start"};function pe(e){return e.replace(/start|end/g,(function(e){return ce[e]}))}function ue(e,t){void 0===t&&(t={});var n=t,r=n.placement,o=n.boundary,i=n.rootBoundary,a=n.padding,s=n.flipVariations,f=n.allowedAutoPlacements,c=void 0===f?S:f,p=U(r),u=p?s?R:R.filter((function(e){return U(e)===p})):k,l=u.filter((function(e){return c.indexOf(e)>=0}));0===l.length&&(l=u);var d=l.reduce((function(t,n){return t[n]=J(e,{placement:n,boundary:o,rootBoundary:i,padding:a})[C(n)],t}),{});return Object.keys(d).sort((function(e,t){return d[e]-d[t]}))}var le={name:"flip",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name;if(!t.modifiersData[r]._skip){for(var o=n.mainAxis,i=void 0===o||o,a=n.altAxis,s=void 0===a||a,f=n.fallbackPlacements,c=n.padding,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.flipVariations,h=void 0===d||d,m=n.allowedAutoPlacements,v=t.options.placement,y=C(v),g=f||(y===v||!h?[fe(v)]:function(e){if(C(e)===M)return[];var t=fe(e);return[pe(e),t,pe(t)]}(v)),b=[v].concat(g).reduce((function(e,n){return e.concat(C(n)===M?ue(t,{placement:n,boundary:p,rootBoundary:u,padding:c,flipVariations:h,allowedAutoPlacements:m}):n)}),[]),x=t.rects.reference,w=t.rects.popper,O=new Map,j=!0,E=b[0],k=0;k=0,S=R?"width":"height",V=J(t,{placement:B,boundary:p,rootBoundary:u,altBoundary:l,padding:c}),q=R?T?L:P:T?A:D;x[S]>w[S]&&(q=fe(q));var N=fe(q),I=[];if(i&&I.push(V[H]<=0),s&&I.push(V[q]<=0,V[N]<=0),I.every((function(e){return 
e}))){E=B,j=!1;break}O.set(B,I)}if(j)for(var _=function(e){var t=b.find((function(t){var n=O.get(t);if(n)return n.slice(0,e).every((function(e){return e}))}));if(t)return E=t,"break"},F=h?3:1;F>0;F--){if("break"===_(F))break}t.placement!==E&&(t.modifiersData[r]._skip=!0,t.placement=E,t.reset=!0)}},requiresIfExists:["offset"],data:{_skip:!1}};function de(e,t,n){return i(e,a(t,n))}var he={name:"preventOverflow",enabled:!0,phase:"main",fn:function(e){var t=e.state,n=e.options,r=e.name,o=n.mainAxis,s=void 0===o||o,f=n.altAxis,c=void 0!==f&&f,p=n.boundary,u=n.rootBoundary,l=n.altBoundary,d=n.padding,h=n.tether,m=void 0===h||h,v=n.tetherOffset,y=void 0===v?0:v,b=J(t,{boundary:p,rootBoundary:u,padding:d,altBoundary:l}),x=C(t.placement),w=U(t.placement),O=!w,j=z(x),M="x"===j?"y":"x",k=t.modifiersData.popperOffsets,B=t.rects.reference,H=t.rects.popper,T="function"==typeof y?y(Object.assign({},t.rects,{placement:t.placement})):y,R="number"==typeof T?{mainAxis:T,altAxis:T}:Object.assign({mainAxis:0,altAxis:0},T),S=t.modifiersData.offset?t.modifiersData.offset[t.placement]:null,V={x:0,y:0};if(k){if(s){var q,N="y"===j?D:P,I="y"===j?A:L,_="y"===j?"height":"width",F=k[j],X=F+b[N],Y=F-b[I],G=m?-H[_]/2:0,K=w===W?B[_]:H[_],Q=w===W?-H[_]:-B[_],Z=t.elements.arrow,$=m&&Z?g(Z):{width:0,height:0},ee=t.modifiersData["arrow#persistent"]?t.modifiersData["arrow#persistent"].padding:{top:0,right:0,bottom:0,left:0},te=ee[N],ne=ee[I],re=de(0,B[_],$[_]),oe=O?B[_]/2-G-re-te-R.mainAxis:K-re-te-R.mainAxis,ie=O?-B[_]/2+G+re+ne+R.mainAxis:Q+re+ne+R.mainAxis,ae=t.elements.arrow&&E(t.elements.arrow),se=ae?"y"===j?ae.clientTop||0:ae.clientLeft||0:0,fe=null!=(q=null==S?void 0:S[j])?q:0,ce=F+ie-fe,pe=de(m?a(X,F+oe-fe-se):X,F,m?i(Y,ce):Y);k[j]=pe,V[j]=pe-F}if(c){var ue,le="x"===j?D:P,he="x"===j?A:L,me=k[M],ve="y"===M?"height":"width",ye=me+b[le],ge=me-b[he],be=-1!==[D,P].indexOf(x),xe=null!=(ue=null==S?void 
0:S[M])?ue:0,we=be?ye:me-B[ve]-H[ve]-xe+R.altAxis,Oe=be?me+B[ve]+H[ve]-xe-R.altAxis:ge,je=m&&be?function(e,t,n){var r=de(e,t,n);return r>n?n:r}(we,me,Oe):de(m?we:ye,me,m?Oe:ge);k[M]=je,V[M]=je-me}t.modifiersData[r]=V}},requiresIfExists:["offset"]};var me={name:"arrow",enabled:!0,phase:"main",fn:function(e){var t,n=e.state,r=e.name,o=e.options,i=n.elements.arrow,a=n.modifiersData.popperOffsets,s=C(n.placement),f=z(s),c=[P,L].indexOf(s)>=0?"height":"width";if(i&&a){var p=function(e,t){return Y("number"!=typeof(e="function"==typeof e?e(Object.assign({},t.rects,{placement:t.placement})):e)?e:G(e,k))}(o.padding,n),u=g(i),l="y"===f?D:P,d="y"===f?A:L,h=n.rects.reference[c]+n.rects.reference[f]-a[f]-n.rects.popper[c],m=a[f]-n.rects.reference[f],v=E(i),y=v?"y"===f?v.clientHeight||0:v.clientWidth||0:0,b=h/2-m/2,x=p[l],w=y-u[c]-p[d],O=y/2-u[c]/2+b,j=de(x,O,w),M=f;n.modifiersData[r]=((t={})[M]=j,t.centerOffset=j-O,t)}},effect:function(e){var t=e.state,n=e.options.element,r=void 0===n?"[data-popper-arrow]":n;null!=r&&("string"!=typeof r||(r=t.elements.popper.querySelector(r)))&&N(t.elements.popper,r)&&(t.elements.arrow=r)},requires:["popperOffsets"],requiresIfExists:["preventOverflow"]};function ve(e,t,n){return void 0===n&&(n={x:0,y:0}),{top:e.top-t.height-n.y,right:e.right-t.width+n.x,bottom:e.bottom-t.height+n.y,left:e.left-t.width-n.x}}function ye(e){return[D,L,A,P].some((function(t){return e[t]>=0}))}var ge={name:"hide",enabled:!0,phase:"main",requiresIfExists:["preventOverflow"],fn:function(e){var 
t=e.state,n=e.name,r=t.rects.reference,o=t.rects.popper,i=t.modifiersData.preventOverflow,a=J(t,{elementContext:"reference"}),s=J(t,{altBoundary:!0}),f=ve(a,r),c=ve(s,o,i),p=ye(f),u=ye(c);t.modifiersData[n]={referenceClippingOffsets:f,popperEscapeOffsets:c,isReferenceHidden:p,hasPopperEscaped:u},t.attributes.popper=Object.assign({},t.attributes.popper,{"data-popper-reference-hidden":p,"data-popper-escaped":u})}},be=Z({defaultModifiers:[ee,te,oe,ie]}),xe=[ee,te,oe,ie,ae,le,he,me,ge],we=Z({defaultModifiers:xe});e.applyStyles=ie,e.arrow=me,e.computeStyles=oe,e.createPopper=we,e.createPopperLite=be,e.defaultModifiers=xe,e.detectOverflow=J,e.eventListeners=ee,e.flip=le,e.hide=ge,e.offset=ae,e.popperGenerator=Z,e.popperOffsets=te,e.preventOverflow=he,Object.defineProperty(e,"__esModule",{value:!0})})); + diff --git a/site_libs/quarto-html/quarto-syntax-highlighting.css b/site_libs/quarto-html/quarto-syntax-highlighting.css new file mode 100644 index 0000000..d9fd98f --- /dev/null +++ b/site_libs/quarto-html/quarto-syntax-highlighting.css @@ -0,0 +1,203 @@ +/* quarto syntax highlight colors */ +:root { + --quarto-hl-ot-color: #003B4F; + --quarto-hl-at-color: #657422; + --quarto-hl-ss-color: #20794D; + --quarto-hl-an-color: #5E5E5E; + --quarto-hl-fu-color: #4758AB; + --quarto-hl-st-color: #20794D; + --quarto-hl-cf-color: #003B4F; + --quarto-hl-op-color: #5E5E5E; + --quarto-hl-er-color: #AD0000; + --quarto-hl-bn-color: #AD0000; + --quarto-hl-al-color: #AD0000; + --quarto-hl-va-color: #111111; + --quarto-hl-bu-color: inherit; + --quarto-hl-ex-color: inherit; + --quarto-hl-pp-color: #AD0000; + --quarto-hl-in-color: #5E5E5E; + --quarto-hl-vs-color: #20794D; + --quarto-hl-wa-color: #5E5E5E; + --quarto-hl-do-color: #5E5E5E; + --quarto-hl-im-color: #00769E; + --quarto-hl-ch-color: #20794D; + --quarto-hl-dt-color: #AD0000; + --quarto-hl-fl-color: #AD0000; + --quarto-hl-co-color: #5E5E5E; + --quarto-hl-cv-color: #5E5E5E; + --quarto-hl-cn-color: #8f5902; + --quarto-hl-sc-color: 
#5E5E5E; + --quarto-hl-dv-color: #AD0000; + --quarto-hl-kw-color: #003B4F; +} + +/* other quarto variables */ +:root { + --quarto-font-monospace: SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace; +} + +pre > code.sourceCode > span { + color: #003B4F; +} + +code span { + color: #003B4F; +} + +code.sourceCode > span { + color: #003B4F; +} + +div.sourceCode, +div.sourceCode pre.sourceCode { + color: #003B4F; +} + +code span.ot { + color: #003B4F; + font-style: inherit; +} + +code span.at { + color: #657422; + font-style: inherit; +} + +code span.ss { + color: #20794D; + font-style: inherit; +} + +code span.an { + color: #5E5E5E; + font-style: inherit; +} + +code span.fu { + color: #4758AB; + font-style: inherit; +} + +code span.st { + color: #20794D; + font-style: inherit; +} + +code span.cf { + color: #003B4F; + font-style: inherit; +} + +code span.op { + color: #5E5E5E; + font-style: inherit; +} + +code span.er { + color: #AD0000; + font-style: inherit; +} + +code span.bn { + color: #AD0000; + font-style: inherit; +} + +code span.al { + color: #AD0000; + font-style: inherit; +} + +code span.va { + color: #111111; + font-style: inherit; +} + +code span.bu { + font-style: inherit; +} + +code span.ex { + font-style: inherit; +} + +code span.pp { + color: #AD0000; + font-style: inherit; +} + +code span.in { + color: #5E5E5E; + font-style: inherit; +} + +code span.vs { + color: #20794D; + font-style: inherit; +} + +code span.wa { + color: #5E5E5E; + font-style: italic; +} + +code span.do { + color: #5E5E5E; + font-style: italic; +} + +code span.im { + color: #00769E; + font-style: inherit; +} + +code span.ch { + color: #20794D; + font-style: inherit; +} + +code span.dt { + color: #AD0000; + font-style: inherit; +} + +code span.fl { + color: #AD0000; + font-style: inherit; +} + +code span.co { + color: #5E5E5E; + font-style: inherit; +} + +code span.cv { + color: #5E5E5E; + font-style: italic; +} + +code span.cn { + color: #8f5902; + 
font-style: inherit; +} + +code span.sc { + color: #5E5E5E; + font-style: inherit; +} + +code span.dv { + color: #AD0000; + font-style: inherit; +} + +code span.kw { + color: #003B4F; + font-style: inherit; +} + +.prevent-inlining { + content: " { + // Find any conflicting margin elements and add margins to the + // top to prevent overlap + const marginChildren = window.document.querySelectorAll( + ".column-margin.column-container > *, .margin-caption, .aside" + ); + + let lastBottom = 0; + for (const marginChild of marginChildren) { + if (marginChild.offsetParent !== null) { + // clear the top margin so we recompute it + marginChild.style.marginTop = null; + const top = marginChild.getBoundingClientRect().top + window.scrollY; + if (top < lastBottom) { + const marginChildStyle = window.getComputedStyle(marginChild); + const marginBottom = parseFloat(marginChildStyle["marginBottom"]); + const margin = lastBottom - top + marginBottom; + marginChild.style.marginTop = `${margin}px`; + } + const styles = window.getComputedStyle(marginChild); + const marginTop = parseFloat(styles["marginTop"]); + lastBottom = top + marginChild.getBoundingClientRect().height + marginTop; + } + } +}; + +window.document.addEventListener("DOMContentLoaded", function (_event) { + // Recompute the position of margin elements anytime the body size changes + if (window.ResizeObserver) { + const resizeObserver = new window.ResizeObserver( + throttle(() => { + layoutMarginEls(); + if ( + window.document.body.getBoundingClientRect().width < 990 && + isReaderMode() + ) { + quartoToggleReader(); + } + }, 50) + ); + resizeObserver.observe(window.document.body); + } + + const tocEl = window.document.querySelector('nav.toc-active[role="doc-toc"]'); + const sidebarEl = window.document.getElementById("quarto-sidebar"); + const leftTocEl = window.document.getElementById("quarto-sidebar-toc-left"); + const marginSidebarEl = window.document.getElementById( + "quarto-margin-sidebar" + ); + // function to 
determine whether the element has a previous sibling that is active + const prevSiblingIsActiveLink = (el) => { + const sibling = el.previousElementSibling; + if (sibling && sibling.tagName === "A") { + return sibling.classList.contains("active"); + } else { + return false; + } + }; + + // fire slideEnter for bootstrap tab activations (for htmlwidget resize behavior) + function fireSlideEnter(e) { + const event = window.document.createEvent("Event"); + event.initEvent("slideenter", true, true); + window.document.dispatchEvent(event); + } + const tabs = window.document.querySelectorAll('a[data-bs-toggle="tab"]'); + tabs.forEach((tab) => { + tab.addEventListener("shown.bs.tab", fireSlideEnter); + }); + + // fire slideEnter for tabby tab activations (for htmlwidget resize behavior) + document.addEventListener("tabby", fireSlideEnter, false); + + // Track scrolling and mark TOC links as active + // get table of contents and sidebar (bail if we don't have at least one) + const tocLinks = tocEl + ? 
[...tocEl.querySelectorAll("a[data-scroll-target]")] + : []; + const makeActive = (link) => tocLinks[link].classList.add("active"); + const removeActive = (link) => tocLinks[link].classList.remove("active"); + const removeAllActive = () => + [...Array(tocLinks.length).keys()].forEach((link) => removeActive(link)); + + // activate the anchor for a section associated with this TOC entry + tocLinks.forEach((link) => { + link.addEventListener("click", () => { + if (link.href.indexOf("#") !== -1) { + const anchor = link.href.split("#")[1]; + const heading = window.document.querySelector( + `[data-anchor-id=${anchor}]` + ); + if (heading) { + // Add the class + heading.classList.add("reveal-anchorjs-link"); + + // function to show the anchor + const handleMouseout = () => { + heading.classList.remove("reveal-anchorjs-link"); + heading.removeEventListener("mouseout", handleMouseout); + }; + + // add a function to clear the anchor when the user mouses out of it + heading.addEventListener("mouseout", handleMouseout); + } + } + }); + }); + + const sections = tocLinks.map((link) => { + const target = link.getAttribute("data-scroll-target"); + if (target.startsWith("#")) { + return window.document.getElementById(decodeURI(`${target.slice(1)}`)); + } else { + return window.document.querySelector(decodeURI(`${target}`)); + } + }); + + const sectionMargin = 200; + let currentActive = 0; + // track whether we've initialized state the first time + let init = false; + + const updateActiveLink = () => { + // The index from bottom to top (e.g. 
reversed list) + let sectionIndex = -1; + if ( + window.innerHeight + window.pageYOffset >= + window.document.body.offsetHeight + ) { + sectionIndex = 0; + } else { + sectionIndex = [...sections].reverse().findIndex((section) => { + if (section) { + return window.pageYOffset >= section.offsetTop - sectionMargin; + } else { + return false; + } + }); + } + if (sectionIndex > -1) { + const current = sections.length - sectionIndex - 1; + if (current !== currentActive) { + removeAllActive(); + currentActive = current; + makeActive(current); + if (init) { + window.dispatchEvent(sectionChanged); + } + init = true; + } + } + }; + + const inHiddenRegion = (top, bottom, hiddenRegions) => { + for (const region of hiddenRegions) { + if (top <= region.bottom && bottom >= region.top) { + return true; + } + } + return false; + }; + + const categorySelector = "header.quarto-title-block .quarto-category"; + const activateCategories = (href) => { + // Find any categories + // Surround them with a link pointing back to: + // #category=Authoring + try { + const categoryEls = window.document.querySelectorAll(categorySelector); + for (const categoryEl of categoryEls) { + const categoryText = categoryEl.textContent; + if (categoryText) { + const link = `${href}#category=${encodeURIComponent(categoryText)}`; + const linkEl = window.document.createElement("a"); + linkEl.setAttribute("href", link); + for (const child of categoryEl.childNodes) { + linkEl.append(child); + } + categoryEl.appendChild(linkEl); + } + } + } catch { + // Ignore errors + } + }; + function hasTitleCategories() { + return window.document.querySelector(categorySelector) !== null; + } + + function offsetRelativeUrl(url) { + const offset = getMeta("quarto:offset"); + return offset ? 
offset + url : url; + } + + function offsetAbsoluteUrl(url) { + const offset = getMeta("quarto:offset"); + const baseUrl = new URL(offset, window.location); + + const projRelativeUrl = url.replace(baseUrl, ""); + if (projRelativeUrl.startsWith("/")) { + return projRelativeUrl; + } else { + return "/" + projRelativeUrl; + } + } + + // read a meta tag value + function getMeta(metaName) { + const metas = window.document.getElementsByTagName("meta"); + for (let i = 0; i < metas.length; i++) { + if (metas[i].getAttribute("name") === metaName) { + return metas[i].getAttribute("content"); + } + } + return ""; + } + + async function findAndActivateCategories() { + const currentPagePath = offsetAbsoluteUrl(window.location.href); + const response = await fetch(offsetRelativeUrl("listings.json")); + if (response.status == 200) { + return response.json().then(function (listingPaths) { + const listingHrefs = []; + for (const listingPath of listingPaths) { + const pathWithoutLeadingSlash = listingPath.listing.substring(1); + for (const item of listingPath.items) { + if ( + item === currentPagePath || + item === currentPagePath + "index.html" + ) { + // Resolve this path against the offset to be sure + // we already are using the correct path to the listing + // (this adjusts the listing urls to be rooted against + // whatever root the page is actually running against) + const relative = offsetRelativeUrl(pathWithoutLeadingSlash); + const baseUrl = window.location; + const resolvedPath = new URL(relative, baseUrl); + listingHrefs.push(resolvedPath.pathname); + break; + } + } + } + + // Look up the tree for a nearby linting and use that if we find one + const nearestListing = findNearestParentListing( + offsetAbsoluteUrl(window.location.pathname), + listingHrefs + ); + if (nearestListing) { + activateCategories(nearestListing); + } else { + // See if the referrer is a listing page for this item + const referredRelativePath = offsetAbsoluteUrl(document.referrer); + const 
referrerListing = listingHrefs.find((listingHref) => { + const isListingReferrer = + listingHref === referredRelativePath || + listingHref === referredRelativePath + "index.html"; + return isListingReferrer; + }); + + if (referrerListing) { + // Try to use the referrer if possible + activateCategories(referrerListing); + } else if (listingHrefs.length > 0) { + // Otherwise, just fall back to the first listing + activateCategories(listingHrefs[0]); + } + } + }); + } + } + if (hasTitleCategories()) { + findAndActivateCategories(); + } + + const findNearestParentListing = (href, listingHrefs) => { + if (!href || !listingHrefs) { + return undefined; + } + // Look up the tree for a nearby linting and use that if we find one + const relativeParts = href.substring(1).split("/"); + while (relativeParts.length > 0) { + const path = relativeParts.join("/"); + for (const listingHref of listingHrefs) { + if (listingHref.startsWith(path)) { + return listingHref; + } + } + relativeParts.pop(); + } + + return undefined; + }; + + const manageSidebarVisiblity = (el, placeholderDescriptor) => { + let isVisible = true; + let elRect; + + return (hiddenRegions) => { + if (el === null) { + return; + } + + // Find the last element of the TOC + const lastChildEl = el.lastElementChild; + + if (lastChildEl) { + // Converts the sidebar to a menu + const convertToMenu = () => { + for (const child of el.children) { + child.style.opacity = 0; + child.style.overflow = "hidden"; + } + + nexttick(() => { + const toggleContainer = window.document.createElement("div"); + toggleContainer.style.width = "100%"; + toggleContainer.classList.add("zindex-over-content"); + toggleContainer.classList.add("quarto-sidebar-toggle"); + toggleContainer.classList.add("headroom-target"); // Marks this to be managed by headeroom + toggleContainer.id = placeholderDescriptor.id; + toggleContainer.style.position = "fixed"; + + const toggleIcon = window.document.createElement("i"); + 
toggleIcon.classList.add("quarto-sidebar-toggle-icon"); + toggleIcon.classList.add("bi"); + toggleIcon.classList.add("bi-caret-down-fill"); + + const toggleTitle = window.document.createElement("div"); + const titleEl = window.document.body.querySelector( + placeholderDescriptor.titleSelector + ); + if (titleEl) { + toggleTitle.append( + titleEl.textContent || titleEl.innerText, + toggleIcon + ); + } + toggleTitle.classList.add("zindex-over-content"); + toggleTitle.classList.add("quarto-sidebar-toggle-title"); + toggleContainer.append(toggleTitle); + + const toggleContents = window.document.createElement("div"); + toggleContents.classList = el.classList; + toggleContents.classList.add("zindex-over-content"); + toggleContents.classList.add("quarto-sidebar-toggle-contents"); + for (const child of el.children) { + if (child.id === "toc-title") { + continue; + } + + const clone = child.cloneNode(true); + clone.style.opacity = 1; + clone.style.display = null; + toggleContents.append(clone); + } + toggleContents.style.height = "0px"; + const positionToggle = () => { + // position the element (top left of parent, same width as parent) + if (!elRect) { + elRect = el.getBoundingClientRect(); + } + toggleContainer.style.left = `${elRect.left}px`; + toggleContainer.style.top = `${elRect.top}px`; + toggleContainer.style.width = `${elRect.width}px`; + }; + positionToggle(); + + toggleContainer.append(toggleContents); + el.parentElement.prepend(toggleContainer); + + // Process clicks + let tocShowing = false; + // Allow the caller to control whether this is dismissed + // when it is clicked (e.g. sidebar navigation supports + // opening and closing the nav tree, so don't dismiss on click) + const clickEl = placeholderDescriptor.dismissOnClick + ? 
toggleContainer + : toggleTitle; + + const closeToggle = () => { + if (tocShowing) { + toggleContainer.classList.remove("expanded"); + toggleContents.style.height = "0px"; + tocShowing = false; + } + }; + + // Get rid of any expanded toggle if the user scrolls + window.document.addEventListener( + "scroll", + throttle(() => { + closeToggle(); + }, 50) + ); + + // Handle positioning of the toggle + window.addEventListener( + "resize", + throttle(() => { + elRect = undefined; + positionToggle(); + }, 50) + ); + + window.addEventListener("quarto-hrChanged", () => { + elRect = undefined; + }); + + // Process the click + clickEl.onclick = () => { + if (!tocShowing) { + toggleContainer.classList.add("expanded"); + toggleContents.style.height = null; + tocShowing = true; + } else { + closeToggle(); + } + }; + }); + }; + + // Converts a sidebar from a menu back to a sidebar + const convertToSidebar = () => { + for (const child of el.children) { + child.style.opacity = 1; + child.style.overflow = null; + } + + const placeholderEl = window.document.getElementById( + placeholderDescriptor.id + ); + if (placeholderEl) { + placeholderEl.remove(); + } + + el.classList.remove("rollup"); + }; + + if (isReaderMode()) { + convertToMenu(); + isVisible = false; + } else { + // Find the top and bottom o the element that is being managed + const elTop = el.offsetTop; + const elBottom = + elTop + lastChildEl.offsetTop + lastChildEl.offsetHeight; + + if (!isVisible) { + // If the element is current not visible reveal if there are + // no conflicts with overlay regions + if (!inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToSidebar(); + isVisible = true; + } + } else { + // If the element is visible, hide it if it conflicts with overlay regions + // and insert a placeholder toggle (or if we're in reader mode) + if (inHiddenRegion(elTop, elBottom, hiddenRegions)) { + convertToMenu(); + isVisible = false; + } + } + } + } + }; + }; + + const tabEls = 
document.querySelectorAll('a[data-bs-toggle="tab"]'); + for (const tabEl of tabEls) { + const id = tabEl.getAttribute("data-bs-target"); + if (id) { + const columnEl = document.querySelector( + `${id} .column-margin, .tabset-margin-content` + ); + if (columnEl) + tabEl.addEventListener("shown.bs.tab", function (event) { + const el = event.srcElement; + if (el) { + const visibleCls = `${el.id}-margin-content`; + // walk up until we find a parent tabset + let panelTabsetEl = el.parentElement; + while (panelTabsetEl) { + if (panelTabsetEl.classList.contains("panel-tabset")) { + break; + } + panelTabsetEl = panelTabsetEl.parentElement; + } + + if (panelTabsetEl) { + const prevSib = panelTabsetEl.previousElementSibling; + if ( + prevSib && + prevSib.classList.contains("tabset-margin-container") + ) { + const childNodes = prevSib.querySelectorAll( + ".tabset-margin-content" + ); + for (const childEl of childNodes) { + if (childEl.classList.contains(visibleCls)) { + childEl.classList.remove("collapse"); + } else { + childEl.classList.add("collapse"); + } + } + } + } + } + + layoutMarginEls(); + }); + } + } + + // Manage the visibility of the toc and the sidebar + const marginScrollVisibility = manageSidebarVisiblity(marginSidebarEl, { + id: "quarto-toc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + const sidebarScrollVisiblity = manageSidebarVisiblity(sidebarEl, { + id: "quarto-sidebarnav-toggle", + titleSelector: ".title", + dismissOnClick: false, + }); + let tocLeftScrollVisibility; + if (leftTocEl) { + tocLeftScrollVisibility = manageSidebarVisiblity(leftTocEl, { + id: "quarto-lefttoc-toggle", + titleSelector: "#toc-title", + dismissOnClick: true, + }); + } + + // Find the first element that uses formatting in special columns + const conflictingEls = window.document.body.querySelectorAll( + '[class^="column-"], [class*=" column-"], aside, [class*="margin-caption"], [class*=" margin-caption"], [class*="margin-ref"], [class*=" margin-ref"]' + ); 
+ + // Filter all the possibly conflicting elements into ones + // the do conflict on the left or ride side + const arrConflictingEls = Array.from(conflictingEls); + const leftSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return false; + } + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + className.startsWith("column-") && + !className.endsWith("right") && + !className.endsWith("container") && + className !== "column-margin" + ); + }); + }); + const rightSideConflictEls = arrConflictingEls.filter((el) => { + if (el.tagName === "ASIDE") { + return true; + } + + const hasMarginCaption = Array.from(el.classList).find((className) => { + return className == "margin-caption"; + }); + if (hasMarginCaption) { + return true; + } + + return Array.from(el.classList).find((className) => { + return ( + className !== "column-body" && + !className.endsWith("container") && + className.startsWith("column-") && + !className.endsWith("left") + ); + }); + }); + + const kOverlapPaddingSize = 10; + function toRegions(els) { + return els.map((el) => { + const boundRect = el.getBoundingClientRect(); + const top = + boundRect.top + + document.documentElement.scrollTop - + kOverlapPaddingSize; + return { + top, + bottom: top + el.scrollHeight + 2 * kOverlapPaddingSize, + }; + }); + } + + let hasObserved = false; + const visibleItemObserver = (els) => { + let visibleElements = [...els]; + const intersectionObserver = new IntersectionObserver( + (entries, _observer) => { + entries.forEach((entry) => { + if (entry.isIntersecting) { + if (visibleElements.indexOf(entry.target) === -1) { + visibleElements.push(entry.target); + } + } else { + visibleElements = visibleElements.filter((visibleEntry) => { + return visibleEntry !== entry; + }); + } + }); + + if (!hasObserved) { + hideOverlappedSidebars(); + } + hasObserved = true; + }, + {} + ); + els.forEach((el) => { + intersectionObserver.observe(el); + }); 
+ + return { + getVisibleEntries: () => { + return visibleElements; + }, + }; + }; + + const rightElementObserver = visibleItemObserver(rightSideConflictEls); + const leftElementObserver = visibleItemObserver(leftSideConflictEls); + + const hideOverlappedSidebars = () => { + marginScrollVisibility(toRegions(rightElementObserver.getVisibleEntries())); + sidebarScrollVisiblity(toRegions(leftElementObserver.getVisibleEntries())); + if (tocLeftScrollVisibility) { + tocLeftScrollVisibility( + toRegions(leftElementObserver.getVisibleEntries()) + ); + } + }; + + window.quartoToggleReader = () => { + // Applies a slow class (or removes it) + // to update the transition speed + const slowTransition = (slow) => { + const manageTransition = (id, slow) => { + const el = document.getElementById(id); + if (el) { + if (slow) { + el.classList.add("slow"); + } else { + el.classList.remove("slow"); + } + } + }; + + manageTransition("TOC", slow); + manageTransition("quarto-sidebar", slow); + }; + const readerMode = !isReaderMode(); + setReaderModeValue(readerMode); + + // If we're entering reader mode, slow the transition + if (readerMode) { + slowTransition(readerMode); + } + highlightReaderToggle(readerMode); + hideOverlappedSidebars(); + + // If we're exiting reader mode, restore the non-slow transition + if (!readerMode) { + slowTransition(!readerMode); + } + }; + + const highlightReaderToggle = (readerMode) => { + const els = document.querySelectorAll(".quarto-reader-toggle"); + if (els) { + els.forEach((el) => { + if (readerMode) { + el.classList.add("reader"); + } else { + el.classList.remove("reader"); + } + }); + } + }; + + const setReaderModeValue = (val) => { + if (window.location.protocol !== "file:") { + window.localStorage.setItem("quarto-reader-mode", val); + } else { + localReaderMode = val; + } + }; + + const isReaderMode = () => { + if (window.location.protocol !== "file:") { + return window.localStorage.getItem("quarto-reader-mode") === "true"; + } else { + return 
localReaderMode; + } + }; + let localReaderMode = null; + + const tocOpenDepthStr = tocEl?.getAttribute("data-toc-expanded"); + const tocOpenDepth = tocOpenDepthStr ? Number(tocOpenDepthStr) : 1; + + // Walk the TOC and collapse/expand nodes + // Nodes are expanded if: + // - they are top level + // - they have children that are 'active' links + // - they are directly below an link that is 'active' + const walk = (el, depth) => { + // Tick depth when we enter a UL + if (el.tagName === "UL") { + depth = depth + 1; + } + + // It this is active link + let isActiveNode = false; + if (el.tagName === "A" && el.classList.contains("active")) { + isActiveNode = true; + } + + // See if there is an active child to this element + let hasActiveChild = false; + for (child of el.children) { + hasActiveChild = walk(child, depth) || hasActiveChild; + } + + // Process the collapse state if this is an UL + if (el.tagName === "UL") { + if (tocOpenDepth === -1 && depth > 1) { + el.classList.add("collapse"); + } else if ( + depth <= tocOpenDepth || + hasActiveChild || + prevSiblingIsActiveLink(el) + ) { + el.classList.remove("collapse"); + } else { + el.classList.add("collapse"); + } + + // untick depth when we leave a UL + depth = depth - 1; + } + return hasActiveChild || isActiveNode; + }; + + // walk the TOC and expand / collapse any items that should be shown + + if (tocEl) { + walk(tocEl, 0); + updateActiveLink(); + } + + // Throttle the scroll event and walk peridiocally + window.document.addEventListener( + "scroll", + throttle(() => { + if (tocEl) { + updateActiveLink(); + walk(tocEl, 0); + } + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 5) + ); + window.addEventListener( + "resize", + throttle(() => { + if (!isReaderMode()) { + hideOverlappedSidebars(); + } + }, 10) + ); + hideOverlappedSidebars(); + highlightReaderToggle(isReaderMode()); +}); + +// grouped tabsets +window.addEventListener("pageshow", (_event) => { + function getTabSettings() { + const data = 
localStorage.getItem("quarto-persistent-tabsets-data"); + if (!data) { + localStorage.setItem("quarto-persistent-tabsets-data", "{}"); + return {}; + } + if (data) { + return JSON.parse(data); + } + } + + function setTabSettings(data) { + localStorage.setItem( + "quarto-persistent-tabsets-data", + JSON.stringify(data) + ); + } + + function setTabState(groupName, groupValue) { + const data = getTabSettings(); + data[groupName] = groupValue; + setTabSettings(data); + } + + function toggleTab(tab, active) { + const tabPanelId = tab.getAttribute("aria-controls"); + const tabPanel = document.getElementById(tabPanelId); + if (active) { + tab.classList.add("active"); + tabPanel.classList.add("active"); + } else { + tab.classList.remove("active"); + tabPanel.classList.remove("active"); + } + } + + function toggleAll(selectedGroup, selectorsToSync) { + for (const [thisGroup, tabs] of Object.entries(selectorsToSync)) { + const active = selectedGroup === thisGroup; + for (const tab of tabs) { + toggleTab(tab, active); + } + } + } + + function findSelectorsToSyncByLanguage() { + const result = {}; + const tabs = Array.from( + document.querySelectorAll(`div[data-group] a[id^='tabset-']`) + ); + for (const item of tabs) { + const div = item.parentElement.parentElement.parentElement; + const group = div.getAttribute("data-group"); + if (!result[group]) { + result[group] = {}; + } + const selectorsToSync = result[group]; + const value = item.innerHTML; + if (!selectorsToSync[value]) { + selectorsToSync[value] = []; + } + selectorsToSync[value].push(item); + } + return result; + } + + function setupSelectorSync() { + const selectorsToSync = findSelectorsToSyncByLanguage(); + Object.entries(selectorsToSync).forEach(([group, tabSetsByValue]) => { + Object.entries(tabSetsByValue).forEach(([value, items]) => { + items.forEach((item) => { + item.addEventListener("click", (_event) => { + setTabState(group, value); + toggleAll(value, selectorsToSync[group]); + }); + }); + }); + }); + 
return selectorsToSync; + } + + const selectorsToSync = setupSelectorSync(); + for (const [group, selectedName] of Object.entries(getTabSettings())) { + const selectors = selectorsToSync[group]; + // it's possible that stale state gives us empty selections, so we explicitly check here. + if (selectors) { + toggleAll(selectedName, selectors); + } + } +}); + +function throttle(func, wait) { + let waiting = false; + return function () { + if (!waiting) { + func.apply(this, arguments); + waiting = true; + setTimeout(function () { + waiting = false; + }, wait); + } + }; +} + +function nexttick(func) { + return setTimeout(func, 0); +} diff --git a/site_libs/quarto-html/tippy.css b/site_libs/quarto-html/tippy.css new file mode 100644 index 0000000..e6ae635 --- /dev/null +++ b/site_libs/quarto-html/tippy.css @@ -0,0 +1 @@ +.tippy-box[data-animation=fade][data-state=hidden]{opacity:0}[data-tippy-root]{max-width:calc(100vw - 10px)}.tippy-box{position:relative;background-color:#333;color:#fff;border-radius:4px;font-size:14px;line-height:1.4;white-space:normal;outline:0;transition-property:transform,visibility,opacity}.tippy-box[data-placement^=top]>.tippy-arrow{bottom:0}.tippy-box[data-placement^=top]>.tippy-arrow:before{bottom:-7px;left:0;border-width:8px 8px 0;border-top-color:initial;transform-origin:center top}.tippy-box[data-placement^=bottom]>.tippy-arrow{top:0}.tippy-box[data-placement^=bottom]>.tippy-arrow:before{top:-7px;left:0;border-width:0 8px 8px;border-bottom-color:initial;transform-origin:center bottom}.tippy-box[data-placement^=left]>.tippy-arrow{right:0}.tippy-box[data-placement^=left]>.tippy-arrow:before{border-width:8px 0 8px 8px;border-left-color:initial;right:-7px;transform-origin:center left}.tippy-box[data-placement^=right]>.tippy-arrow{left:0}.tippy-box[data-placement^=right]>.tippy-arrow:before{left:-7px;border-width:8px 8px 8px 0;border-right-color:initial;transform-origin:center 
right}.tippy-box[data-inertia][data-state=visible]{transition-timing-function:cubic-bezier(.54,1.5,.38,1.11)}.tippy-arrow{width:16px;height:16px;color:#333}.tippy-arrow:before{content:"";position:absolute;border-color:transparent;border-style:solid}.tippy-content{position:relative;padding:5px 9px;z-index:1} \ No newline at end of file diff --git a/site_libs/quarto-html/tippy.umd.min.js b/site_libs/quarto-html/tippy.umd.min.js new file mode 100644 index 0000000..ca292be --- /dev/null +++ b/site_libs/quarto-html/tippy.umd.min.js @@ -0,0 +1,2 @@ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t(require("@popperjs/core")):"function"==typeof define&&define.amd?define(["@popperjs/core"],t):(e=e||self).tippy=t(e.Popper)}(this,(function(e){"use strict";var t={passive:!0,capture:!0},n=function(){return document.body};function r(e,t,n){if(Array.isArray(e)){var r=e[t];return null==r?Array.isArray(n)?n[t]:n:r}return e}function o(e,t){var n={}.toString.call(e);return 0===n.indexOf("[object")&&n.indexOf(t+"]")>-1}function i(e,t){return"function"==typeof e?e.apply(void 0,t):e}function a(e,t){return 0===t?e:function(r){clearTimeout(n),n=setTimeout((function(){e(r)}),t)};var n}function s(e,t){var n=Object.assign({},e);return t.forEach((function(e){delete n[e]})),n}function u(e){return[].concat(e)}function c(e,t){-1===e.indexOf(t)&&e.push(t)}function p(e){return e.split("-")[0]}function f(e){return[].slice.call(e)}function l(e){return Object.keys(e).reduce((function(t,n){return void 0!==e[n]&&(t[n]=e[n]),t}),{})}function d(){return document.createElement("div")}function v(e){return["Element","Fragment"].some((function(t){return o(e,t)}))}function m(e){return o(e,"MouseEvent")}function g(e){return!(!e||!e._tippy||e._tippy.reference!==e)}function h(e){return v(e)?[e]:function(e){return o(e,"NodeList")}(e)?f(e):Array.isArray(e)?e:f(document.querySelectorAll(e))}function b(e,t){e.forEach((function(e){e&&(e.style.transitionDuration=t+"ms")}))}function 
y(e,t){e.forEach((function(e){e&&e.setAttribute("data-state",t)}))}function w(e){var t,n=u(e)[0];return null!=n&&null!=(t=n.ownerDocument)&&t.body?n.ownerDocument:document}function E(e,t,n){var r=t+"EventListener";["transitionend","webkitTransitionEnd"].forEach((function(t){e[r](t,n)}))}function O(e,t){for(var n=t;n;){var r;if(e.contains(n))return!0;n=null==n.getRootNode||null==(r=n.getRootNode())?void 0:r.host}return!1}var x={isTouch:!1},C=0;function T(){x.isTouch||(x.isTouch=!0,window.performance&&document.addEventListener("mousemove",A))}function A(){var e=performance.now();e-C<20&&(x.isTouch=!1,document.removeEventListener("mousemove",A)),C=e}function L(){var e=document.activeElement;if(g(e)){var t=e._tippy;e.blur&&!t.state.isVisible&&e.blur()}}var D=!!("undefined"!=typeof window&&"undefined"!=typeof document)&&!!window.msCrypto,R=Object.assign({appendTo:n,aria:{content:"auto",expanded:"auto"},delay:0,duration:[300,250],getReferenceClientRect:null,hideOnClick:!0,ignoreAttributes:!1,interactive:!1,interactiveBorder:2,interactiveDebounce:0,moveTransition:"",offset:[0,10],onAfterUpdate:function(){},onBeforeUpdate:function(){},onCreate:function(){},onDestroy:function(){},onHidden:function(){},onHide:function(){},onMount:function(){},onShow:function(){},onShown:function(){},onTrigger:function(){},onUntrigger:function(){},onClickOutside:function(){},placement:"top",plugins:[],popperOptions:{},render:null,showOnCreate:!1,touch:!0,trigger:"mouseenter focus",triggerTarget:null},{animateFill:!1,followCursor:!1,inlinePositioning:!1,sticky:!1},{allowHTML:!1,animation:"fade",arrow:!0,content:"",inertia:!1,maxWidth:350,role:"tooltip",theme:"",zIndex:9999}),k=Object.keys(R);function P(e){var t=(e.plugins||[]).reduce((function(t,n){var r,o=n.name,i=n.defaultValue;o&&(t[o]=void 0!==e[o]?e[o]:null!=(r=R[o])?r:i);return t}),{});return Object.assign({},e,t)}function j(e,t){var 
n=Object.assign({},t,{content:i(t.content,[e])},t.ignoreAttributes?{}:function(e,t){return(t?Object.keys(P(Object.assign({},R,{plugins:t}))):k).reduce((function(t,n){var r=(e.getAttribute("data-tippy-"+n)||"").trim();if(!r)return t;if("content"===n)t[n]=r;else try{t[n]=JSON.parse(r)}catch(e){t[n]=r}return t}),{})}(e,t.plugins));return n.aria=Object.assign({},R.aria,n.aria),n.aria={expanded:"auto"===n.aria.expanded?t.interactive:n.aria.expanded,content:"auto"===n.aria.content?t.interactive?null:"describedby":n.aria.content},n}function M(e,t){e.innerHTML=t}function V(e){var t=d();return!0===e?t.className="tippy-arrow":(t.className="tippy-svg-arrow",v(e)?t.appendChild(e):M(t,e)),t}function I(e,t){v(t.content)?(M(e,""),e.appendChild(t.content)):"function"!=typeof t.content&&(t.allowHTML?M(e,t.content):e.textContent=t.content)}function S(e){var t=e.firstElementChild,n=f(t.children);return{box:t,content:n.find((function(e){return e.classList.contains("tippy-content")})),arrow:n.find((function(e){return e.classList.contains("tippy-arrow")||e.classList.contains("tippy-svg-arrow")})),backdrop:n.find((function(e){return e.classList.contains("tippy-backdrop")}))}}function N(e){var t=d(),n=d();n.className="tippy-box",n.setAttribute("data-state","hidden"),n.setAttribute("tabindex","-1");var r=d();function o(n,r){var o=S(t),i=o.box,a=o.content,s=o.arrow;r.theme?i.setAttribute("data-theme",r.theme):i.removeAttribute("data-theme"),"string"==typeof r.animation?i.setAttribute("data-animation",r.animation):i.removeAttribute("data-animation"),r.inertia?i.setAttribute("data-inertia",""):i.removeAttribute("data-inertia"),i.style.maxWidth="number"==typeof r.maxWidth?r.maxWidth+"px":r.maxWidth,r.role?i.setAttribute("role",r.role):i.removeAttribute("role"),n.content===r.content&&n.allowHTML===r.allowHTML||I(a,e.props),r.arrow?s?n.arrow!==r.arrow&&(i.removeChild(s),i.appendChild(V(r.arrow))):i.appendChild(V(r.arrow)):s&&i.removeChild(s)}return 
r.className="tippy-content",r.setAttribute("data-state","hidden"),I(r,e.props),t.appendChild(n),n.appendChild(r),o(e.props,e.props),{popper:t,onUpdate:o}}N.$$tippy=!0;var B=1,H=[],U=[];function _(o,s){var v,g,h,C,T,A,L,k,M=j(o,Object.assign({},R,P(l(s)))),V=!1,I=!1,N=!1,_=!1,F=[],W=a(we,M.interactiveDebounce),X=B++,Y=(k=M.plugins).filter((function(e,t){return k.indexOf(e)===t})),$={id:X,reference:o,popper:d(),popperInstance:null,props:M,state:{isEnabled:!0,isVisible:!1,isDestroyed:!1,isMounted:!1,isShown:!1},plugins:Y,clearDelayTimeouts:function(){clearTimeout(v),clearTimeout(g),cancelAnimationFrame(h)},setProps:function(e){if($.state.isDestroyed)return;ae("onBeforeUpdate",[$,e]),be();var t=$.props,n=j(o,Object.assign({},t,l(e),{ignoreAttributes:!0}));$.props=n,he(),t.interactiveDebounce!==n.interactiveDebounce&&(ce(),W=a(we,n.interactiveDebounce));t.triggerTarget&&!n.triggerTarget?u(t.triggerTarget).forEach((function(e){e.removeAttribute("aria-expanded")})):n.triggerTarget&&o.removeAttribute("aria-expanded");ue(),ie(),J&&J(t,n);$.popperInstance&&(Ce(),Ae().forEach((function(e){requestAnimationFrame(e._tippy.popperInstance.forceUpdate)})));ae("onAfterUpdate",[$,e])},setContent:function(e){$.setProps({content:e})},show:function(){var e=$.state.isVisible,t=$.state.isDestroyed,o=!$.state.isEnabled,a=x.isTouch&&!$.props.touch,s=r($.props.duration,0,R.duration);if(e||t||o||a)return;if(te().hasAttribute("disabled"))return;if(ae("onShow",[$],!1),!1===$.props.onShow($))return;$.state.isVisible=!0,ee()&&(z.style.visibility="visible");ie(),de(),$.state.isMounted||(z.style.transition="none");if(ee()){var u=re(),p=u.box,f=u.content;b([p,f],0)}A=function(){var e;if($.state.isVisible&&!_){if(_=!0,z.offsetHeight,z.style.transition=$.props.moveTransition,ee()&&$.props.animation){var 
t=re(),n=t.box,r=t.content;b([n,r],s),y([n,r],"visible")}se(),ue(),c(U,$),null==(e=$.popperInstance)||e.forceUpdate(),ae("onMount",[$]),$.props.animation&&ee()&&function(e,t){me(e,t)}(s,(function(){$.state.isShown=!0,ae("onShown",[$])}))}},function(){var e,t=$.props.appendTo,r=te();e=$.props.interactive&&t===n||"parent"===t?r.parentNode:i(t,[r]);e.contains(z)||e.appendChild(z);$.state.isMounted=!0,Ce()}()},hide:function(){var e=!$.state.isVisible,t=$.state.isDestroyed,n=!$.state.isEnabled,o=r($.props.duration,1,R.duration);if(e||t||n)return;if(ae("onHide",[$],!1),!1===$.props.onHide($))return;$.state.isVisible=!1,$.state.isShown=!1,_=!1,V=!1,ee()&&(z.style.visibility="hidden");if(ce(),ve(),ie(!0),ee()){var i=re(),a=i.box,s=i.content;$.props.animation&&(b([a,s],o),y([a,s],"hidden"))}se(),ue(),$.props.animation?ee()&&function(e,t){me(e,(function(){!$.state.isVisible&&z.parentNode&&z.parentNode.contains(z)&&t()}))}(o,$.unmount):$.unmount()},hideWithInteractivity:function(e){ne().addEventListener("mousemove",W),c(H,W),W(e)},enable:function(){$.state.isEnabled=!0},disable:function(){$.hide(),$.state.isEnabled=!1},unmount:function(){$.state.isVisible&&$.hide();if(!$.state.isMounted)return;Te(),Ae().forEach((function(e){e._tippy.unmount()})),z.parentNode&&z.parentNode.removeChild(z);U=U.filter((function(e){return e!==$})),$.state.isMounted=!1,ae("onHidden",[$])},destroy:function(){if($.state.isDestroyed)return;$.clearDelayTimeouts(),$.unmount(),be(),delete o._tippy,$.state.isDestroyed=!0,ae("onDestroy",[$])}};if(!M.render)return $;var q=M.render($),z=q.popper,J=q.onUpdate;z.setAttribute("data-tippy-root",""),z.id="tippy-"+$.id,$.popper=z,o._tippy=$,z._tippy=$;var G=Y.map((function(e){return e.fn($)})),K=o.hasAttribute("aria-expanded");return 
he(),ue(),ie(),ae("onCreate",[$]),M.showOnCreate&&Le(),z.addEventListener("mouseenter",(function(){$.props.interactive&&$.state.isVisible&&$.clearDelayTimeouts()})),z.addEventListener("mouseleave",(function(){$.props.interactive&&$.props.trigger.indexOf("mouseenter")>=0&&ne().addEventListener("mousemove",W)})),$;function Q(){var e=$.props.touch;return Array.isArray(e)?e:[e,0]}function Z(){return"hold"===Q()[0]}function ee(){var e;return!(null==(e=$.props.render)||!e.$$tippy)}function te(){return L||o}function ne(){var e=te().parentNode;return e?w(e):document}function re(){return S(z)}function oe(e){return $.state.isMounted&&!$.state.isVisible||x.isTouch||C&&"focus"===C.type?0:r($.props.delay,e?0:1,R.delay)}function ie(e){void 0===e&&(e=!1),z.style.pointerEvents=$.props.interactive&&!e?"":"none",z.style.zIndex=""+$.props.zIndex}function ae(e,t,n){var r;(void 0===n&&(n=!0),G.forEach((function(n){n[e]&&n[e].apply(n,t)})),n)&&(r=$.props)[e].apply(r,t)}function se(){var e=$.props.aria;if(e.content){var t="aria-"+e.content,n=z.id;u($.props.triggerTarget||o).forEach((function(e){var r=e.getAttribute(t);if($.state.isVisible)e.setAttribute(t,r?r+" "+n:n);else{var o=r&&r.replace(n,"").trim();o?e.setAttribute(t,o):e.removeAttribute(t)}}))}}function ue(){!K&&$.props.aria.expanded&&u($.props.triggerTarget||o).forEach((function(e){$.props.interactive?e.setAttribute("aria-expanded",$.state.isVisible&&e===te()?"true":"false"):e.removeAttribute("aria-expanded")}))}function ce(){ne().removeEventListener("mousemove",W),H=H.filter((function(e){return e!==W}))}function pe(e){if(!x.isTouch||!N&&"mousedown"!==e.type){var t=e.composedPath&&e.composedPath()[0]||e.target;if(!$.props.interactive||!O(z,t)){if(u($.props.triggerTarget||o).some((function(e){return O(e,t)}))){if(x.isTouch)return;if($.state.isVisible&&$.props.trigger.indexOf("click")>=0)return}else 
ae("onClickOutside",[$,e]);!0===$.props.hideOnClick&&($.clearDelayTimeouts(),$.hide(),I=!0,setTimeout((function(){I=!1})),$.state.isMounted||ve())}}}function fe(){N=!0}function le(){N=!1}function de(){var e=ne();e.addEventListener("mousedown",pe,!0),e.addEventListener("touchend",pe,t),e.addEventListener("touchstart",le,t),e.addEventListener("touchmove",fe,t)}function ve(){var e=ne();e.removeEventListener("mousedown",pe,!0),e.removeEventListener("touchend",pe,t),e.removeEventListener("touchstart",le,t),e.removeEventListener("touchmove",fe,t)}function me(e,t){var n=re().box;function r(e){e.target===n&&(E(n,"remove",r),t())}if(0===e)return t();E(n,"remove",T),E(n,"add",r),T=r}function ge(e,t,n){void 0===n&&(n=!1),u($.props.triggerTarget||o).forEach((function(r){r.addEventListener(e,t,n),F.push({node:r,eventType:e,handler:t,options:n})}))}function he(){var e;Z()&&(ge("touchstart",ye,{passive:!0}),ge("touchend",Ee,{passive:!0})),(e=$.props.trigger,e.split(/\s+/).filter(Boolean)).forEach((function(e){if("manual"!==e)switch(ge(e,ye),e){case"mouseenter":ge("mouseleave",Ee);break;case"focus":ge(D?"focusout":"blur",Oe);break;case"focusin":ge("focusout",Oe)}}))}function be(){F.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),F=[]}function ye(e){var t,n=!1;if($.state.isEnabled&&!xe(e)&&!I){var r="focus"===(null==(t=C)?void 0:t.type);C=e,L=e.currentTarget,ue(),!$.state.isVisible&&m(e)&&H.forEach((function(t){return t(e)})),"click"===e.type&&($.props.trigger.indexOf("mouseenter")<0||V)&&!1!==$.props.hideOnClick&&$.state.isVisible?n=!0:Le(e),"click"===e.type&&(V=!n),n&&!r&&De(e)}}function we(e){var t=e.target,n=te().contains(t)||z.contains(t);"mousemove"===e.type&&n||function(e,t){var n=t.clientX,r=t.clientY;return e.every((function(e){var t=e.popperRect,o=e.popperState,i=e.props.interactiveBorder,a=p(o.placement),s=o.modifiersData.offset;if(!s)return!0;var 
u="bottom"===a?s.top.y:0,c="top"===a?s.bottom.y:0,f="right"===a?s.left.x:0,l="left"===a?s.right.x:0,d=t.top-r+u>i,v=r-t.bottom-c>i,m=t.left-n+f>i,g=n-t.right-l>i;return d||v||m||g}))}(Ae().concat(z).map((function(e){var t,n=null==(t=e._tippy.popperInstance)?void 0:t.state;return n?{popperRect:e.getBoundingClientRect(),popperState:n,props:M}:null})).filter(Boolean),e)&&(ce(),De(e))}function Ee(e){xe(e)||$.props.trigger.indexOf("click")>=0&&V||($.props.interactive?$.hideWithInteractivity(e):De(e))}function Oe(e){$.props.trigger.indexOf("focusin")<0&&e.target!==te()||$.props.interactive&&e.relatedTarget&&z.contains(e.relatedTarget)||De(e)}function xe(e){return!!x.isTouch&&Z()!==e.type.indexOf("touch")>=0}function Ce(){Te();var t=$.props,n=t.popperOptions,r=t.placement,i=t.offset,a=t.getReferenceClientRect,s=t.moveTransition,u=ee()?S(z).arrow:null,c=a?{getBoundingClientRect:a,contextElement:a.contextElement||te()}:o,p=[{name:"offset",options:{offset:i}},{name:"preventOverflow",options:{padding:{top:2,bottom:2,left:5,right:5}}},{name:"flip",options:{padding:5}},{name:"computeStyles",options:{adaptive:!s}},{name:"$$tippy",enabled:!0,phase:"beforeWrite",requires:["computeStyles"],fn:function(e){var t=e.state;if(ee()){var n=re().box;["placement","reference-hidden","escaped"].forEach((function(e){"placement"===e?n.setAttribute("data-placement",t.placement):t.attributes.popper["data-popper-"+e]?n.setAttribute("data-"+e,""):n.removeAttribute("data-"+e)})),t.attributes.popper={}}}}];ee()&&u&&p.push({name:"arrow",options:{element:u,padding:3}}),p.push.apply(p,(null==n?void 0:n.modifiers)||[]),$.popperInstance=e.createPopper(c,z,Object.assign({},n,{placement:r,onFirstUpdate:A,modifiers:p}))}function Te(){$.popperInstance&&($.popperInstance.destroy(),$.popperInstance=null)}function Ae(){return f(z.querySelectorAll("[data-tippy-root]"))}function Le(e){$.clearDelayTimeouts(),e&&ae("onTrigger",[$,e]),de();var 
t=oe(!0),n=Q(),r=n[0],o=n[1];x.isTouch&&"hold"===r&&o&&(t=o),t?v=setTimeout((function(){$.show()}),t):$.show()}function De(e){if($.clearDelayTimeouts(),ae("onUntrigger",[$,e]),$.state.isVisible){if(!($.props.trigger.indexOf("mouseenter")>=0&&$.props.trigger.indexOf("click")>=0&&["mouseleave","mousemove"].indexOf(e.type)>=0&&V)){var t=oe(!1);t?g=setTimeout((function(){$.state.isVisible&&$.hide()}),t):h=requestAnimationFrame((function(){$.hide()}))}}else ve()}}function F(e,n){void 0===n&&(n={});var r=R.plugins.concat(n.plugins||[]);document.addEventListener("touchstart",T,t),window.addEventListener("blur",L);var o=Object.assign({},n,{plugins:r}),i=h(e).reduce((function(e,t){var n=t&&_(t,o);return n&&e.push(n),e}),[]);return v(e)?i[0]:i}F.defaultProps=R,F.setDefaultProps=function(e){Object.keys(e).forEach((function(t){R[t]=e[t]}))},F.currentInput=x;var W=Object.assign({},e.applyStyles,{effect:function(e){var t=e.state,n={popper:{position:t.options.strategy,left:"0",top:"0",margin:"0"},arrow:{position:"absolute"},reference:{}};Object.assign(t.elements.popper.style,n.popper),t.styles=n,t.elements.arrow&&Object.assign(t.elements.arrow.style,n.arrow)}}),X={mouseover:"mouseenter",focusin:"focus",click:"click"};var Y={name:"animateFill",defaultValue:!1,fn:function(e){var t;if(null==(t=e.props.render)||!t.$$tippy)return{};var n=S(e.popper),r=n.box,o=n.content,i=e.props.animateFill?function(){var e=d();return e.className="tippy-backdrop",y([e],"hidden"),e}():null;return{onCreate:function(){i&&(r.insertBefore(i,r.firstElementChild),r.setAttribute("data-animatefill",""),r.style.overflow="hidden",e.setProps({arrow:!1,animation:"shift-away"}))},onMount:function(){if(i){var e=r.style.transitionDuration,t=Number(e.replace("ms",""));o.style.transitionDelay=Math.round(t/10)+"ms",i.style.transitionDuration=e,y([i],"visible")}},onShow:function(){i&&(i.style.transitionDuration="0ms")},onHide:function(){i&&y([i],"hidden")}}}};var $={clientX:0,clientY:0},q=[];function z(e){var 
t=e.clientX,n=e.clientY;$={clientX:t,clientY:n}}var J={name:"followCursor",defaultValue:!1,fn:function(e){var t=e.reference,n=w(e.props.triggerTarget||t),r=!1,o=!1,i=!0,a=e.props;function s(){return"initial"===e.props.followCursor&&e.state.isVisible}function u(){n.addEventListener("mousemove",f)}function c(){n.removeEventListener("mousemove",f)}function p(){r=!0,e.setProps({getReferenceClientRect:null}),r=!1}function f(n){var r=!n.target||t.contains(n.target),o=e.props.followCursor,i=n.clientX,a=n.clientY,s=t.getBoundingClientRect(),u=i-s.left,c=a-s.top;!r&&e.props.interactive||e.setProps({getReferenceClientRect:function(){var e=t.getBoundingClientRect(),n=i,r=a;"initial"===o&&(n=e.left+u,r=e.top+c);var s="horizontal"===o?e.top:r,p="vertical"===o?e.right:n,f="horizontal"===o?e.bottom:r,l="vertical"===o?e.left:n;return{width:p-l,height:f-s,top:s,right:p,bottom:f,left:l}}})}function l(){e.props.followCursor&&(q.push({instance:e,doc:n}),function(e){e.addEventListener("mousemove",z)}(n))}function d(){0===(q=q.filter((function(t){return t.instance!==e}))).filter((function(e){return e.doc===n})).length&&function(e){e.removeEventListener("mousemove",z)}(n)}return{onCreate:l,onDestroy:d,onBeforeUpdate:function(){a=e.props},onAfterUpdate:function(t,n){var i=n.followCursor;r||void 0!==i&&a.followCursor!==i&&(d(),i?(l(),!e.state.isMounted||o||s()||u()):(c(),p()))},onMount:function(){e.props.followCursor&&!o&&(i&&(f($),i=!1),s()||u())},onTrigger:function(e,t){m(t)&&($={clientX:t.clientX,clientY:t.clientY}),o="focus"===t.type},onHidden:function(){e.props.followCursor&&(p(),c(),i=!0)}}}};var G={name:"inlinePositioning",defaultValue:!1,fn:function(e){var t,n=e.reference;var r=-1,o=!1,i=[],a={name:"tippyInlinePositioning",enabled:!0,phase:"afterWrite",fn:function(o){var a=o.state;e.props.inlinePositioning&&(-1!==i.indexOf(a.placement)&&(i=[]),t!==a.placement&&-1===i.indexOf(a.placement)&&(i.push(a.placement),e.setProps({getReferenceClientRect:function(){return function(e){return 
function(e,t,n,r){if(n.length<2||null===e)return t;if(2===n.length&&r>=0&&n[0].left>n[1].right)return n[r]||t;switch(e){case"top":case"bottom":var o=n[0],i=n[n.length-1],a="top"===e,s=o.top,u=i.bottom,c=a?o.left:i.left,p=a?o.right:i.right;return{top:s,bottom:u,left:c,right:p,width:p-c,height:u-s};case"left":case"right":var f=Math.min.apply(Math,n.map((function(e){return e.left}))),l=Math.max.apply(Math,n.map((function(e){return e.right}))),d=n.filter((function(t){return"left"===e?t.left===f:t.right===l})),v=d[0].top,m=d[d.length-1].bottom;return{top:v,bottom:m,left:f,right:l,width:l-f,height:m-v};default:return t}}(p(e),n.getBoundingClientRect(),f(n.getClientRects()),r)}(a.placement)}})),t=a.placement)}};function s(){var t;o||(t=function(e,t){var n;return{popperOptions:Object.assign({},e.popperOptions,{modifiers:[].concat(((null==(n=e.popperOptions)?void 0:n.modifiers)||[]).filter((function(e){return e.name!==t.name})),[t])})}}(e.props,a),o=!0,e.setProps(t),o=!1)}return{onCreate:s,onAfterUpdate:s,onTrigger:function(t,n){if(m(n)){var o=f(e.reference.getClientRects()),i=o.find((function(e){return e.left-2<=n.clientX&&e.right+2>=n.clientX&&e.top-2<=n.clientY&&e.bottom+2>=n.clientY})),a=o.indexOf(i);r=a>-1?a:r}},onHidden:function(){r=-1}}}};var K={name:"sticky",defaultValue:!1,fn:function(e){var t=e.reference,n=e.popper;function r(t){return!0===e.props.sticky||e.props.sticky===t}var o=null,i=null;function a(){var s=r("reference")?(e.popperInstance?e.popperInstance.state.elements.reference:t).getBoundingClientRect():null,u=r("popper")?n.getBoundingClientRect():null;(s&&Q(o,s)||u&&Q(i,u))&&e.popperInstance&&e.popperInstance.update(),o=s,i=u,e.state.isMounted&&requestAnimationFrame(a)}return{onMount:function(){e.props.sticky&&a()}}}};function Q(e,t){return!e||!t||(e.top!==t.top||e.right!==t.right||e.bottom!==t.bottom||e.left!==t.left)}return F.setDefaultProps({plugins:[Y,J,G,K],render:N}),F.createSingleton=function(e,t){var n;void 0===t&&(t={});var 
r,o=e,i=[],a=[],c=t.overrides,p=[],f=!1;function l(){a=o.map((function(e){return u(e.props.triggerTarget||e.reference)})).reduce((function(e,t){return e.concat(t)}),[])}function v(){i=o.map((function(e){return e.reference}))}function m(e){o.forEach((function(t){e?t.enable():t.disable()}))}function g(e){return o.map((function(t){var n=t.setProps;return t.setProps=function(o){n(o),t.reference===r&&e.setProps(o)},function(){t.setProps=n}}))}function h(e,t){var n=a.indexOf(t);if(t!==r){r=t;var s=(c||[]).concat("content").reduce((function(e,t){return e[t]=o[n].props[t],e}),{});e.setProps(Object.assign({},s,{getReferenceClientRect:"function"==typeof s.getReferenceClientRect?s.getReferenceClientRect:function(){var e;return null==(e=i[n])?void 0:e.getBoundingClientRect()}}))}}m(!1),v(),l();var b={fn:function(){return{onDestroy:function(){m(!0)},onHidden:function(){r=null},onClickOutside:function(e){e.props.showOnCreate&&!f&&(f=!0,r=null)},onShow:function(e){e.props.showOnCreate&&!f&&(f=!0,h(e,i[0]))},onTrigger:function(e,t){h(e,t.currentTarget)}}}},y=F(d(),Object.assign({},s(t,["overrides"]),{plugins:[b].concat(t.plugins||[]),triggerTarget:a,popperOptions:Object.assign({},t.popperOptions,{modifiers:[].concat((null==(n=t.popperOptions)?void 0:n.modifiers)||[],[W])})})),w=y.show;y.show=function(e){if(w(),!r&&null==e)return h(y,i[0]);if(!r||null!=e){if("number"==typeof e)return i[e]&&h(y,i[e]);if(o.indexOf(e)>=0){var t=e.reference;return h(y,t)}return i.indexOf(e)>=0?h(y,e):void 0}},y.showNext=function(){var e=i[0];if(!r)return y.show(0);var t=i.indexOf(r);y.show(i[t+1]||e)},y.showPrevious=function(){var e=i[i.length-1];if(!r)return y.show(e);var t=i.indexOf(r),n=i[t-1]||e;y.show(n)};var E=y.setProps;return y.setProps=function(e){c=e.overrides||c,E(e)},y.setInstances=function(e){m(!0),p.forEach((function(e){return e()})),o=e,m(!1),v(),l(),p=g(y),y.setProps({triggerTarget:a})},p=g(y),y},F.delegate=function(e,n){var 
r=[],o=[],i=!1,a=n.target,c=s(n,["target"]),p=Object.assign({},c,{trigger:"manual",touch:!1}),f=Object.assign({touch:R.touch},c,{showOnCreate:!0}),l=F(e,p);function d(e){if(e.target&&!i){var t=e.target.closest(a);if(t){var r=t.getAttribute("data-tippy-trigger")||n.trigger||R.trigger;if(!t._tippy&&!("touchstart"===e.type&&"boolean"==typeof f.touch||"touchstart"!==e.type&&r.indexOf(X[e.type])<0)){var s=F(t,f);s&&(o=o.concat(s))}}}}function v(e,t,n,o){void 0===o&&(o=!1),e.addEventListener(t,n,o),r.push({node:e,eventType:t,handler:n,options:o})}return u(l).forEach((function(e){var n=e.destroy,a=e.enable,s=e.disable;e.destroy=function(e){void 0===e&&(e=!0),e&&o.forEach((function(e){e.destroy()})),o=[],r.forEach((function(e){var t=e.node,n=e.eventType,r=e.handler,o=e.options;t.removeEventListener(n,r,o)})),r=[],n()},e.enable=function(){a(),o.forEach((function(e){return e.enable()})),i=!1},e.disable=function(){s(),o.forEach((function(e){return e.disable()})),i=!0},function(e){var n=e.reference;v(n,"touchstart",d,t),v(n,"mouseover",d),v(n,"focusin",d),v(n,"click",d)}(e)})),l},F.hideAll=function(e){var t=void 0===e?{}:e,n=t.exclude,r=t.duration;U.forEach((function(e){var t=!1;if(n&&(t=g(n)?e.reference===n:e.popper===n.popper),!t){var o=e.props.duration;e.setProps({duration:r}),e.hide(),e.state.isDestroyed||e.setProps({duration:o})}}))},F.roundArrow='',F})); + diff --git a/site_libs/quarto-nav/headroom.min.js b/site_libs/quarto-nav/headroom.min.js new file mode 100644 index 0000000..b08f1df --- /dev/null +++ b/site_libs/quarto-nav/headroom.min.js @@ -0,0 +1,7 @@ +/*! + * headroom.js v0.12.0 - Give your page some headroom. 
Hide your header until you need it + * Copyright (c) 2020 Nick Williams - http://wicky.nillia.ms/headroom.js + * License: MIT + */ + +!function(t,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(t=t||self).Headroom=n()}(this,function(){"use strict";function t(){return"undefined"!=typeof window}function d(t){return function(t){return t&&t.document&&function(t){return 9===t.nodeType}(t.document)}(t)?function(t){var n=t.document,o=n.body,s=n.documentElement;return{scrollHeight:function(){return Math.max(o.scrollHeight,s.scrollHeight,o.offsetHeight,s.offsetHeight,o.clientHeight,s.clientHeight)},height:function(){return t.innerHeight||s.clientHeight||o.clientHeight},scrollY:function(){return void 0!==t.pageYOffset?t.pageYOffset:(s||o.parentNode||o).scrollTop}}}(t):function(t){return{scrollHeight:function(){return Math.max(t.scrollHeight,t.offsetHeight,t.clientHeight)},height:function(){return Math.max(t.offsetHeight,t.clientHeight)},scrollY:function(){return t.scrollTop}}}(t)}function n(t,s,e){var n,o=function(){var n=!1;try{var t={get passive(){n=!0}};window.addEventListener("test",t,t),window.removeEventListener("test",t,t)}catch(t){n=!1}return n}(),i=!1,r=d(t),l=r.scrollY(),a={};function c(){var t=Math.round(r.scrollY()),n=r.height(),o=r.scrollHeight();a.scrollY=t,a.lastScrollY=l,a.direction=ls.tolerance[a.direction],e(a),l=t,i=!1}function h(){i||(i=!0,n=requestAnimationFrame(c))}var u=!!o&&{passive:!0,capture:!1};return t.addEventListener("scroll",h,u),c(),{destroy:function(){cancelAnimationFrame(n),t.removeEventListener("scroll",h,u)}}}function o(t){return t===Object(t)?t:{down:t,up:t}}function s(t,n){n=n||{},Object.assign(this,s.options,n),this.classes=Object.assign({},s.options.classes,n.classes),this.elem=t,this.tolerance=o(this.tolerance),this.offset=o(this.offset),this.initialised=!1,this.frozen=!1}return s.prototype={constructor:s,init:function(){return 
s.cutsTheMustard&&!this.initialised&&(this.addClass("initial"),this.initialised=!0,setTimeout(function(t){t.scrollTracker=n(t.scroller,{offset:t.offset,tolerance:t.tolerance},t.update.bind(t))},100,this)),this},destroy:function(){this.initialised=!1,Object.keys(this.classes).forEach(this.removeClass,this),this.scrollTracker.destroy()},unpin:function(){!this.hasClass("pinned")&&this.hasClass("unpinned")||(this.addClass("unpinned"),this.removeClass("pinned"),this.onUnpin&&this.onUnpin.call(this))},pin:function(){this.hasClass("unpinned")&&(this.addClass("pinned"),this.removeClass("unpinned"),this.onPin&&this.onPin.call(this))},freeze:function(){this.frozen=!0,this.addClass("frozen")},unfreeze:function(){this.frozen=!1,this.removeClass("frozen")},top:function(){this.hasClass("top")||(this.addClass("top"),this.removeClass("notTop"),this.onTop&&this.onTop.call(this))},notTop:function(){this.hasClass("notTop")||(this.addClass("notTop"),this.removeClass("top"),this.onNotTop&&this.onNotTop.call(this))},bottom:function(){this.hasClass("bottom")||(this.addClass("bottom"),this.removeClass("notBottom"),this.onBottom&&this.onBottom.call(this))},notBottom:function(){this.hasClass("notBottom")||(this.addClass("notBottom"),this.removeClass("bottom"),this.onNotBottom&&this.onNotBottom.call(this))},shouldUnpin:function(t){return"down"===t.direction&&!t.top&&t.toleranceExceeded},shouldPin:function(t){return"up"===t.direction&&t.toleranceExceeded||t.top},addClass:function(t){this.elem.classList.add.apply(this.elem.classList,this.classes[t].split(" "))},removeClass:function(t){this.elem.classList.remove.apply(this.elem.classList,this.classes[t].split(" "))},hasClass:function(t){return this.classes[t].split(" ").every(function(t){return 
this.classList.contains(t)},this.elem)},update:function(t){t.isOutOfBounds||!0!==this.frozen&&(t.top?this.top():this.notTop(),t.bottom?this.bottom():this.notBottom(),this.shouldUnpin(t)?this.unpin():this.shouldPin(t)&&this.pin())}},s.options={tolerance:{up:0,down:0},offset:0,scroller:t()?window:null,classes:{frozen:"headroom--frozen",pinned:"headroom--pinned",unpinned:"headroom--unpinned",top:"headroom--top",notTop:"headroom--not-top",bottom:"headroom--bottom",notBottom:"headroom--not-bottom",initial:"headroom"}},s.cutsTheMustard=!!(t()&&function(){}.bind&&"classList"in document.documentElement&&Object.assign&&Object.keys&&requestAnimationFrame),s}); diff --git a/site_libs/quarto-nav/quarto-nav.js b/site_libs/quarto-nav/quarto-nav.js new file mode 100644 index 0000000..f6a53b1 --- /dev/null +++ b/site_libs/quarto-nav/quarto-nav.js @@ -0,0 +1,289 @@ +const headroomChanged = new CustomEvent("quarto-hrChanged", { + detail: {}, + bubbles: true, + cancelable: false, + composed: false, +}); + +window.document.addEventListener("DOMContentLoaded", function () { + let init = false; + + // Manage the back to top button, if one is present. + let lastScrollTop = window.pageYOffset || document.documentElement.scrollTop; + const scrollDownBuffer = 5; + const scrollUpBuffer = 35; + const btn = document.getElementById("quarto-back-to-top"); + const hideBackToTop = () => { + btn.style.display = "none"; + }; + const showBackToTop = () => { + btn.style.display = "inline-block"; + }; + if (btn) { + window.document.addEventListener( + "scroll", + function () { + const currentScrollTop = + window.pageYOffset || document.documentElement.scrollTop; + + // Shows and hides the button 'intelligently' as the user scrolls + if (currentScrollTop - scrollDownBuffer > lastScrollTop) { + hideBackToTop(); + lastScrollTop = currentScrollTop <= 0 ? 0 : currentScrollTop; + } else if (currentScrollTop < lastScrollTop - scrollUpBuffer) { + showBackToTop(); + lastScrollTop = currentScrollTop <= 0 ? 
0 : currentScrollTop; + } + + // Show the button at the bottom, hides it at the top + if (currentScrollTop <= 0) { + hideBackToTop(); + } else if ( + window.innerHeight + currentScrollTop >= + document.body.offsetHeight + ) { + showBackToTop(); + } + }, + false + ); + } + + function throttle(func, wait) { + var timeout; + return function () { + const context = this; + const args = arguments; + const later = function () { + clearTimeout(timeout); + timeout = null; + func.apply(context, args); + }; + + if (!timeout) { + timeout = setTimeout(later, wait); + } + }; + } + + function headerOffset() { + // Set an offset if there is are fixed top navbar + const headerEl = window.document.querySelector("header.fixed-top"); + if (headerEl) { + return headerEl.clientHeight; + } else { + return 0; + } + } + + function footerOffset() { + const footerEl = window.document.querySelector("footer.footer"); + if (footerEl) { + return footerEl.clientHeight; + } else { + return 0; + } + } + + function dashboardOffset() { + const dashboardNavEl = window.document.getElementById( + "quarto-dashboard-header" + ); + if (dashboardNavEl !== null) { + return dashboardNavEl.clientHeight; + } else { + return 0; + } + } + + function updateDocumentOffsetWithoutAnimation() { + updateDocumentOffset(false); + } + + function updateDocumentOffset(animated) { + // set body offset + const topOffset = headerOffset(); + const bodyOffset = topOffset + footerOffset() + dashboardOffset(); + const bodyEl = window.document.body; + bodyEl.setAttribute("data-bs-offset", topOffset); + bodyEl.style.paddingTop = topOffset + "px"; + + // deal with sidebar offsets + const sidebars = window.document.querySelectorAll( + ".sidebar, .headroom-target" + ); + sidebars.forEach((sidebar) => { + if (!animated) { + sidebar.classList.add("notransition"); + // Remove the no transition class after the animation has time to complete + setTimeout(function () { + sidebar.classList.remove("notransition"); + }, 201); + } + + if 
(window.Headroom && sidebar.classList.contains("sidebar-unpinned")) { + sidebar.style.top = "0"; + sidebar.style.maxHeight = "100vh"; + } else { + sidebar.style.top = topOffset + "px"; + sidebar.style.maxHeight = "calc(100vh - " + topOffset + "px)"; + } + }); + + // allow space for footer + const mainContainer = window.document.querySelector(".quarto-container"); + if (mainContainer) { + mainContainer.style.minHeight = "calc(100vh - " + bodyOffset + "px)"; + } + + // link offset + let linkStyle = window.document.querySelector("#quarto-target-style"); + if (!linkStyle) { + linkStyle = window.document.createElement("style"); + linkStyle.setAttribute("id", "quarto-target-style"); + window.document.head.appendChild(linkStyle); + } + while (linkStyle.firstChild) { + linkStyle.removeChild(linkStyle.firstChild); + } + if (topOffset > 0) { + linkStyle.appendChild( + window.document.createTextNode(` + section:target::before { + content: ""; + display: block; + height: ${topOffset}px; + margin: -${topOffset}px 0 0; + }`) + ); + } + if (init) { + window.dispatchEvent(headroomChanged); + } + init = true; + } + + // initialize headroom + var header = window.document.querySelector("#quarto-header"); + if (header && window.Headroom) { + const headroom = new window.Headroom(header, { + tolerance: 5, + onPin: function () { + const sidebars = window.document.querySelectorAll( + ".sidebar, .headroom-target" + ); + sidebars.forEach((sidebar) => { + sidebar.classList.remove("sidebar-unpinned"); + }); + updateDocumentOffset(); + }, + onUnpin: function () { + const sidebars = window.document.querySelectorAll( + ".sidebar, .headroom-target" + ); + sidebars.forEach((sidebar) => { + sidebar.classList.add("sidebar-unpinned"); + }); + updateDocumentOffset(); + }, + }); + headroom.init(); + + let frozen = false; + window.quartoToggleHeadroom = function () { + if (frozen) { + headroom.unfreeze(); + frozen = false; + } else { + headroom.freeze(); + frozen = true; + } + }; + } + + 
window.addEventListener( + "hashchange", + function (e) { + if ( + getComputedStyle(document.documentElement).scrollBehavior !== "smooth" + ) { + window.scrollTo(0, window.pageYOffset - headerOffset()); + } + }, + false + ); + + // Observe size changed for the header + const headerEl = window.document.querySelector("header.fixed-top"); + if (headerEl && window.ResizeObserver) { + const observer = new window.ResizeObserver(() => { + setTimeout(updateDocumentOffsetWithoutAnimation, 0); + }); + observer.observe(headerEl, { + attributes: true, + childList: true, + characterData: true, + }); + } else { + window.addEventListener( + "resize", + throttle(updateDocumentOffsetWithoutAnimation, 50) + ); + } + setTimeout(updateDocumentOffsetWithoutAnimation, 250); + + // fixup index.html links if we aren't on the filesystem + if (window.location.protocol !== "file:") { + const links = window.document.querySelectorAll("a"); + for (let i = 0; i < links.length; i++) { + if (links[i].href) { + links[i].dataset.originalHref = links[i].href; + links[i].href = links[i].href.replace(/\/index\.html/, "/"); + } + } + + // Fixup any sharing links that require urls + // Append url to any sharing urls + const sharingLinks = window.document.querySelectorAll( + "a.sidebar-tools-main-item, a.quarto-navigation-tool, a.quarto-navbar-tools, a.quarto-navbar-tools-item" + ); + for (let i = 0; i < sharingLinks.length; i++) { + const sharingLink = sharingLinks[i]; + const href = sharingLink.getAttribute("href"); + if (href) { + sharingLink.setAttribute( + "href", + href.replace("|url|", window.location.href) + ); + } + } + + // Scroll the active navigation item into view, if necessary + const navSidebar = window.document.querySelector("nav#quarto-sidebar"); + if (navSidebar) { + // Find the active item + const activeItem = navSidebar.querySelector("li.sidebar-item a.active"); + if (activeItem) { + // Wait for the scroll height and height to resolve by observing size changes on the + // nav element 
that is scrollable + const resizeObserver = new ResizeObserver((_entries) => { + // The bottom of the element + const elBottom = activeItem.offsetTop; + const viewBottom = navSidebar.scrollTop + navSidebar.clientHeight; + + // The element height and scroll height are the same, then we are still loading + if (viewBottom !== navSidebar.scrollHeight) { + // Determine if the item isn't visible and scroll to it + if (elBottom >= viewBottom) { + navSidebar.scrollTop = elBottom; + } + + // stop observing now since we've completed the scroll + resizeObserver.unobserve(navSidebar); + } + }); + resizeObserver.observe(navSidebar); + } + } + } +}); diff --git a/site_libs/quarto-search/autocomplete.umd.js b/site_libs/quarto-search/autocomplete.umd.js new file mode 100644 index 0000000..ae0063a --- /dev/null +++ b/site_libs/quarto-search/autocomplete.umd.js @@ -0,0 +1,3 @@ +/*! @algolia/autocomplete-js 1.11.1 | MIT License | © Algolia, Inc. and contributors | https://github.com/algolia/autocomplete */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@algolia/autocomplete-js"]={})}(this,(function(e){"use strict";function t(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function n(e){for(var n=1;n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function a(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,u,a=[],l=!0,c=!1;try{if(i=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;l=!1}else 
for(;!(l=(r=i.call(n)).done)&&(a.push(r.value),a.length!==t);l=!0);}catch(e){c=!0,o=e}finally{try{if(!l&&null!=n.return&&(u=n.return(),Object(u)!==u))return}finally{if(c)throw o}}return a}}(e,t)||c(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function l(e){return function(e){if(Array.isArray(e))return s(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||c(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function c(e,t){if(e){if("string"==typeof e)return s(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?s(e,t):void 0}}function s(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);ne.length)&&(t=e.length);for(var n=0,r=new Array(t);ne.length)&&(t=e.length);for(var n=0,r=new Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function x(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function N(e){for(var t=1;t1&&void 0!==arguments[1]?arguments[1]:20,n=[],r=0;r=3||2===n&&r>=4||1===n&&r>=10);function i(t,n,r){if(o&&void 0!==r){var i=r[0].__autocomplete_algoliaCredentials,u={"X-Algolia-Application-Id":i.appId,"X-Algolia-API-Key":i.apiKey};e.apply(void 0,[t].concat(D(n),[{headers:u}]))}else e.apply(void 
0,[t].concat(D(n)))}return{init:function(t,n){e("init",{appId:t,apiKey:n})},setUserToken:function(t){e("setUserToken",t)},clickedObjectIDsAfterSearch:function(){for(var e=arguments.length,t=new Array(e),n=0;n0&&i("clickedObjectIDsAfterSearch",B(t),t[0].items)},clickedObjectIDs:function(){for(var e=arguments.length,t=new Array(e),n=0;n0&&i("clickedObjectIDs",B(t),t[0].items)},clickedFilters:function(){for(var t=arguments.length,n=new Array(t),r=0;r0&&e.apply(void 0,["clickedFilters"].concat(n))},convertedObjectIDsAfterSearch:function(){for(var e=arguments.length,t=new Array(e),n=0;n0&&i("convertedObjectIDsAfterSearch",B(t),t[0].items)},convertedObjectIDs:function(){for(var e=arguments.length,t=new Array(e),n=0;n0&&i("convertedObjectIDs",B(t),t[0].items)},convertedFilters:function(){for(var t=arguments.length,n=new Array(t),r=0;r0&&e.apply(void 0,["convertedFilters"].concat(n))},viewedObjectIDs:function(){for(var e=arguments.length,t=new Array(e),n=0;n0&&t.reduce((function(e,t){var n=t.items,r=k(t,A);return[].concat(D(e),D(q(N(N({},r),{},{objectIDs:(null==n?void 0:n.map((function(e){return e.objectID})))||r.objectIDs})).map((function(e){return{items:n,payload:e}}))))}),[]).forEach((function(e){var t=e.items;return i("viewedObjectIDs",[e.payload],t)}))},viewedFilters:function(){for(var t=arguments.length,n=new Array(t),r=0;r0&&e.apply(void 0,["viewedFilters"].concat(n))}}}function F(e){var t=e.items.reduce((function(e,t){var n;return e[t.__autocomplete_indexName]=(null!==(n=e[t.__autocomplete_indexName])&&void 0!==n?n:[]).concat(t),e}),{});return Object.keys(t).map((function(e){return{index:e,items:t[e],algoliaSource:["autocomplete"]}}))}function L(e){return e.objectID&&e.__autocomplete_indexName&&e.__autocomplete_queryID}function U(e){return U="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},U(e)}function M(e){return 
function(e){if(Array.isArray(e))return H(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return H(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return H(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function H(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n0&&z({onItemsChange:r,items:n,insights:a,state:t}))}}),0);return{name:"aa.algoliaInsightsPlugin",subscribe:function(e){var t=e.setContext,n=e.onSelect,r=e.onActive;function l(e){t({algoliaInsightsPlugin:{__algoliaSearchParameters:W({clickAnalytics:!0},e?{userToken:e}:{}),insights:a}})}u("addAlgoliaAgent","insights-plugin"),l(),u("onUserTokenChange",l),u("getUserToken",null,(function(e,t){l(t)})),n((function(e){var t=e.item,n=e.state,r=e.event,i=e.source;L(t)&&o({state:n,event:r,insights:a,item:t,insightsEvents:[W({eventName:"Item Selected"},j({item:t,items:i.getItems().filter(L)}))]})})),r((function(e){var t=e.item,n=e.source,r=e.state,o=e.event;L(t)&&i({state:r,event:o,insights:a,item:t,insightsEvents:[W({eventName:"Item Active"},j({item:t,items:n.getItems().filter(L)}))]})}))},onStateChange:function(e){var t=e.state;c({state:t})},__autocomplete_pluginOptions:e}}function J(e,t){var n=t;return{then:function(t,r){return J(e.then(Y(t,n,e),Y(r,n,e)),n)},catch:function(t){return J(e.catch(Y(t,n,e)),n)},finally:function(t){return t&&n.onCancelList.push(t),J(e.finally(Y(t&&function(){return n.onCancelList=[],t()},n,e)),n)},cancel:function(){n.isCanceled=!0;var 
e=n.onCancelList;n.onCancelList=[],e.forEach((function(e){e()}))},isCanceled:function(){return!0===n.isCanceled}}}function X(e){return J(e,{isCanceled:!1,onCancelList:[]})}function Y(e,t,n){return e?function(n){return t.isCanceled?n:e(n)}:n}function Z(e,t,n,r){if(!n)return null;if(e<0&&(null===t||null!==r&&0===t))return n+e;var o=(null===t?-1:t)+e;return o<=-1||o>=n?null===r?null:0:o}function ee(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function te(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=new Array(t);n0},reshape:function(e){return e.sources}},e),{},{id:null!==(n=e.id)&&void 0!==n?n:d(),plugins:o,initialState:he({activeItemId:null,query:"",completion:null,collections:[],isOpen:!1,status:"idle",context:{}},e.initialState),onStateChange:function(t){var n;null===(n=e.onStateChange)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onStateChange)||void 0===n?void 0:n.call(e,t)}))},onSubmit:function(t){var n;null===(n=e.onSubmit)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onSubmit)||void 0===n?void 0:n.call(e,t)}))},onReset:function(t){var n;null===(n=e.onReset)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onReset)||void 0===n?void 0:n.call(e,t)}))},getSources:function(n){return Promise.all([].concat(ye(o.map((function(e){return e.getSources}))),[e.getSources]).filter(Boolean).map((function(e){return function(e,t){var n=[];return Promise.resolve(e(t)).then((function(e){return Promise.all(e.filter((function(e){return Boolean(e)})).map((function(e){if(e.sourceId,n.includes(e.sourceId))throw new Error("[Autocomplete] The `sourceId` ".concat(JSON.stringify(e.sourceId)," is not unique."));n.push(e.sourceId);var t={getItemInputValue:function(e){return 
e.state.query},getItemUrl:function(){},onSelect:function(e){(0,e.setIsOpen)(!1)},onActive:O,onResolve:O};Object.keys(t).forEach((function(e){t[e].__default=!0}));var r=te(te({},t),e);return Promise.resolve(r)})))}))}(e,n)}))).then((function(e){return m(e)})).then((function(e){return e.map((function(e){return he(he({},e),{},{onSelect:function(n){e.onSelect(n),t.forEach((function(e){var t;return null===(t=e.onSelect)||void 0===t?void 0:t.call(e,n)}))},onActive:function(n){e.onActive(n),t.forEach((function(e){var t;return null===(t=e.onActive)||void 0===t?void 0:t.call(e,n)}))},onResolve:function(n){e.onResolve(n),t.forEach((function(e){var t;return null===(t=e.onResolve)||void 0===t?void 0:t.call(e,n)}))}})}))}))},navigator:he({navigate:function(e){var t=e.itemUrl;r.location.assign(t)},navigateNewTab:function(e){var t=e.itemUrl,n=r.open(t,"_blank","noopener");null==n||n.focus()},navigateNewWindow:function(e){var t=e.itemUrl;r.open(t,"_blank","noopener")}},e.navigator)})}function Se(e){return Se="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},Se(e)}function je(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Pe(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=new Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var He,Ve,We,Ke=null,Qe=(He=-1,Ve=-1,We=void 0,function(e){var t=++He;return Promise.resolve(e).then((function(e){return We&&t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function et(e){return et="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},et(e)}var tt=["props","refresh","store"],nt=["inputElement","formElement","panelElement"],rt=["inputElement"],ot=["inputElement","maxLength"],it=["source"],ut=["item","source"];function at(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function lt(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function ft(e){var t=e.props,n=e.refresh,r=e.store,o=st(e,tt);return{getEnvironmentProps:function(e){var n=e.inputElement,o=e.formElement,i=e.panelElement;function u(e){!r.getState().isOpen&&r.pendingRequests.isEmpty()||e.target===n||!1===[o,i].some((function(t){return n=t,r=e.target,n===r||n.contains(r);var n,r}))&&(r.dispatch("blur",null),t.debug||r.pendingRequests.cancelAll())}return lt({onTouchStart:u,onMouseDown:u,onTouchMove:function(e){!1!==r.getState().isOpen&&n===t.environment.document.activeElement&&e.target!==n&&n.blur()}},st(e,nt))},getRootProps:function(e){return lt({role:"combobox","aria-expanded":r.getState().isOpen,"aria-haspopup":"listbox","aria-owns":r.getState().isOpen?r.getState().collections.map((function(e){var n=e.source;return ie(t.id,"list",n)})).join(" "):void 0,"aria-labelledby":ie(t.id,"label")},e)},getFormProps:function(e){return e.inputElement,lt({action:"",noValidate:!0,role:"search",onSubmit:function(i){var 
u;i.preventDefault(),t.onSubmit(lt({event:i,refresh:n,state:r.getState()},o)),r.dispatch("submit",null),null===(u=e.inputElement)||void 0===u||u.blur()},onReset:function(i){var u;i.preventDefault(),t.onReset(lt({event:i,refresh:n,state:r.getState()},o)),r.dispatch("reset",null),null===(u=e.inputElement)||void 0===u||u.focus()}},st(e,rt))},getLabelProps:function(e){return lt({htmlFor:ie(t.id,"input"),id:ie(t.id,"label")},e)},getInputProps:function(e){var i;function u(e){(t.openOnFocus||Boolean(r.getState().query))&&$e(lt({event:e,props:t,query:r.getState().completion||r.getState().query,refresh:n,store:r},o)),r.dispatch("focus",null)}var a=e||{};a.inputElement;var l=a.maxLength,c=void 0===l?512:l,s=st(a,ot),f=oe(r.getState()),p=function(e){return Boolean(e&&e.match(ue))}((null===(i=t.environment.navigator)||void 0===i?void 0:i.userAgent)||""),m=t.enterKeyHint||(null!=f&&f.itemUrl&&!p?"go":"search");return lt({"aria-autocomplete":"both","aria-activedescendant":r.getState().isOpen&&null!==r.getState().activeItemId?ie(t.id,"item-".concat(r.getState().activeItemId),null==f?void 0:f.source):void 0,"aria-controls":r.getState().isOpen?r.getState().collections.map((function(e){var n=e.source;return ie(t.id,"list",n)})).join(" "):void 0,"aria-labelledby":ie(t.id,"label"),value:r.getState().completion||r.getState().query,id:ie(t.id,"input"),autoComplete:"off",autoCorrect:"off",autoCapitalize:"off",enterKeyHint:m,spellCheck:"false",autoFocus:t.autoFocus,placeholder:t.placeholder,maxLength:c,type:"search",onChange:function(e){$e(lt({event:e,props:t,query:e.currentTarget.value.slice(0,c),refresh:n,store:r},o))},onKeyDown:function(e){!function(e){var t=e.event,n=e.props,r=e.refresh,o=e.store,i=Ze(e,Ge);if("ArrowUp"===t.key||"ArrowDown"===t.key){var u=function(){var e=oe(o.getState()),t=n.environment.document.getElementById(ie(n.id,"item-".concat(o.getState().activeItemId),null==e?void 
0:e.source));t&&(t.scrollIntoViewIfNeeded?t.scrollIntoViewIfNeeded(!1):t.scrollIntoView(!1))},a=function(){var e=oe(o.getState());if(null!==o.getState().activeItemId&&e){var n=e.item,u=e.itemInputValue,a=e.itemUrl,l=e.source;l.onActive(Xe({event:t,item:n,itemInputValue:u,itemUrl:a,refresh:r,source:l,state:o.getState()},i))}};t.preventDefault(),!1===o.getState().isOpen&&(n.openOnFocus||Boolean(o.getState().query))?$e(Xe({event:t,props:n,query:o.getState().query,refresh:r,store:o},i)).then((function(){o.dispatch(t.key,{nextActiveItemId:n.defaultActiveItemId}),a(),setTimeout(u,0)})):(o.dispatch(t.key,{}),a(),u())}else if("Escape"===t.key)t.preventDefault(),o.dispatch(t.key,null),o.pendingRequests.cancelAll();else if("Tab"===t.key)o.dispatch("blur",null),o.pendingRequests.cancelAll();else if("Enter"===t.key){if(null===o.getState().activeItemId||o.getState().collections.every((function(e){return 0===e.items.length})))return void(n.debug||o.pendingRequests.cancelAll());t.preventDefault();var l=oe(o.getState()),c=l.item,s=l.itemInputValue,f=l.itemUrl,p=l.source;if(t.metaKey||t.ctrlKey)void 0!==f&&(p.onSelect(Xe({event:t,item:c,itemInputValue:s,itemUrl:f,refresh:r,source:p,state:o.getState()},i)),n.navigator.navigateNewTab({itemUrl:f,item:c,state:o.getState()}));else if(t.shiftKey)void 0!==f&&(p.onSelect(Xe({event:t,item:c,itemInputValue:s,itemUrl:f,refresh:r,source:p,state:o.getState()},i)),n.navigator.navigateNewWindow({itemUrl:f,item:c,state:o.getState()}));else if(t.altKey);else{if(void 0!==f)return p.onSelect(Xe({event:t,item:c,itemInputValue:s,itemUrl:f,refresh:r,source:p,state:o.getState()},i)),void 
n.navigator.navigate({itemUrl:f,item:c,state:o.getState()});$e(Xe({event:t,nextState:{isOpen:!1},props:n,query:s,refresh:r,store:o},i)).then((function(){p.onSelect(Xe({event:t,item:c,itemInputValue:s,itemUrl:f,refresh:r,source:p,state:o.getState()},i))}))}}}(lt({event:e,props:t,refresh:n,store:r},o))},onFocus:u,onBlur:O,onClick:function(n){e.inputElement!==t.environment.document.activeElement||r.getState().isOpen||u(n)}},s)},getPanelProps:function(e){return lt({onMouseDown:function(e){e.preventDefault()},onMouseLeave:function(){r.dispatch("mouseleave",null)}},e)},getListProps:function(e){var n=e||{},r=n.source,o=st(n,it);return lt({role:"listbox","aria-labelledby":ie(t.id,"label"),id:ie(t.id,"list",r)},o)},getItemProps:function(e){var i=e.item,u=e.source,a=st(e,ut);return lt({id:ie(t.id,"item-".concat(i.__autocomplete_id),u),role:"option","aria-selected":r.getState().activeItemId===i.__autocomplete_id,onMouseMove:function(e){if(i.__autocomplete_id!==r.getState().activeItemId){r.dispatch("mousemove",i.__autocomplete_id);var t=oe(r.getState());if(null!==r.getState().activeItemId&&t){var u=t.item,a=t.itemInputValue,l=t.itemUrl,c=t.source;c.onActive(lt({event:e,item:u,itemInputValue:a,itemUrl:l,refresh:n,source:c,state:r.getState()},o))}}},onMouseDown:function(e){e.preventDefault()},onClick:function(e){var a=u.getItemInputValue({item:i,state:r.getState()}),l=u.getItemUrl({item:i,state:r.getState()});(l?Promise.resolve():$e(lt({event:e,nextState:{isOpen:!1},props:t,query:a,refresh:n,store:r},o))).then((function(){u.onSelect(lt({event:e,item:i,itemInputValue:a,itemUrl:l,refresh:n,source:u,state:r.getState()},o))}))}},a)}}}function pt(e){return pt="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},pt(e)}function mt(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function vt(e){for(var t=1;t=5&&((o||!e&&5===r)&&(u.push(r,0,o,n),r=6),e&&(u.push(r,e,0,n),r=6)),o=""},l=0;l"===t?(r=1,o=""):o=t+o[0]:i?t===i?i="":o+=t:'"'===t||"'"===t?i=t:">"===t?(a(),r=1):r&&("="===t?(r=5,n=o,o=""):"/"===t&&(r<5||">"===e[l][c+1])?(a(),3===r&&(u=u[0]),r=u,(u=u[0]).push(2,0,r),r=0):" "===t||"\t"===t||"\n"===t||"\r"===t?(a(),r=2):o+=t),3===r&&"!--"===o&&(r=4,u=u[0])}return a(),u}(e)),t),arguments,[])).length>1?t:t[0]}var kt=function(e){var t=e.environment,n=t.document.createElementNS("http://www.w3.org/2000/svg","svg");n.setAttribute("class","aa-ClearIcon"),n.setAttribute("viewBox","0 0 24 24"),n.setAttribute("width","18"),n.setAttribute("height","18"),n.setAttribute("fill","currentColor");var r=t.document.createElementNS("http://www.w3.org/2000/svg","path");return r.setAttribute("d","M5.293 6.707l5.293 5.293-5.293 5.293c-0.391 0.391-0.391 1.024 0 1.414s1.024 0.391 1.414 0l5.293-5.293 5.293 5.293c0.391 0.391 1.024 0.391 1.414 0s0.391-1.024 0-1.414l-5.293-5.293 5.293-5.293c0.391-0.391 0.391-1.024 0-1.414s-1.024-0.391-1.414 0l-5.293 5.293-5.293-5.293c-0.391-0.391-1.024-0.391-1.414 0s-0.391 1.024 0 1.414z"),n.appendChild(r),n};function xt(e,t){if("string"==typeof t){var n=e.document.querySelector(t);return"The element ".concat(JSON.stringify(t)," is not in the document."),n}return t}function Nt(){for(var e=arguments.length,t=new Array(e),n=0;n2&&(u.children=arguments.length>3?Jt.call(arguments,2):n),"function"==typeof e&&null!=e.defaultProps)for(i in e.defaultProps)void 0===u[i]&&(u[i]=e.defaultProps[i]);return sn(e,u,r,o,null)}function sn(e,t,n,r,o){var i={type:e,props:t,key:n,ref:r,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,__h:null,constructor:void 0,__v:null==o?++Yt:o};return null==o&&null!=Xt.vnode&&Xt.vnode(i),i}function fn(e){return e.children}function 
pn(e,t){this.props=e,this.context=t}function mn(e,t){if(null==t)return e.__?mn(e.__,e.__.__k.indexOf(e)+1):null;for(var n;tt&&Zt.sort(nn));yn.__r=0}function bn(e,t,n,r,o,i,u,a,l,c){var s,f,p,m,v,d,y,b=r&&r.__k||on,g=b.length;for(n.__k=[],s=0;s0?sn(m.type,m.props,m.key,m.ref?m.ref:null,m.__v):m)){if(m.__=n,m.__b=n.__b+1,null===(p=b[s])||p&&m.key==p.key&&m.type===p.type)b[s]=void 0;else for(f=0;f=0;t--)if((n=e.__k[t])&&(r=On(n)))return r;return null}function _n(e,t,n){"-"===t[0]?e.setProperty(t,null==n?"":n):e[t]=null==n?"":"number"!=typeof n||un.test(t)?n:n+"px"}function Sn(e,t,n,r,o){var i;e:if("style"===t)if("string"==typeof n)e.style.cssText=n;else{if("string"==typeof r&&(e.style.cssText=r=""),r)for(t in r)n&&t in n||_n(e.style,t,"");if(n)for(t in n)r&&n[t]===r[t]||_n(e.style,t,n[t])}else if("o"===t[0]&&"n"===t[1])i=t!==(t=t.replace(/Capture$/,"")),t=t.toLowerCase()in e?t.toLowerCase().slice(2):t.slice(2),e.l||(e.l={}),e.l[t+i]=n,n?r||e.addEventListener(t,i?Pn:jn,i):e.removeEventListener(t,i?Pn:jn,i);else if("dangerouslySetInnerHTML"!==t){if(o)t=t.replace(/xlink(H|:h)/,"h").replace(/sName$/,"s");else if("width"!==t&&"height"!==t&&"href"!==t&&"list"!==t&&"form"!==t&&"tabIndex"!==t&&"download"!==t&&t in e)try{e[t]=null==n?"":n;break e}catch(e){}"function"==typeof n||(null==n||!1===n&&"-"!==t[4]?e.removeAttribute(t):e.setAttribute(t,n))}}function jn(e){return this.l[e.type+!1](Xt.event?Xt.event(e):e)}function Pn(e){return this.l[e.type+!0](Xt.event?Xt.event(e):e)}function wn(e,t,n,r,o,i,u,a,l){var c,s,f,p,m,v,d,y,b,g,h,O,_,S,j,P=t.type;if(void 0!==t.constructor)return null;null!=n.__h&&(l=n.__h,a=t.__e=n.__e,t.__h=null,i=[a]),(c=Xt.__b)&&c(t);try{e:if("function"==typeof P){if(y=t.props,b=(c=P.contextType)&&r[c.__c],g=c?b?b.props.value:c.__:r,n.__c?d=(s=t.__c=n.__c).__=s.__E:("prototype"in P&&P.prototype.render?t.__c=s=new P(y,g):(t.__c=s=new 
pn(y,g),s.constructor=P,s.render=Cn),b&&b.sub(s),s.props=y,s.state||(s.state={}),s.context=g,s.__n=r,f=s.__d=!0,s.__h=[],s._sb=[]),null==s.__s&&(s.__s=s.state),null!=P.getDerivedStateFromProps&&(s.__s==s.state&&(s.__s=an({},s.__s)),an(s.__s,P.getDerivedStateFromProps(y,s.__s))),p=s.props,m=s.state,s.__v=t,f)null==P.getDerivedStateFromProps&&null!=s.componentWillMount&&s.componentWillMount(),null!=s.componentDidMount&&s.__h.push(s.componentDidMount);else{if(null==P.getDerivedStateFromProps&&y!==p&&null!=s.componentWillReceiveProps&&s.componentWillReceiveProps(y,g),!s.__e&&null!=s.shouldComponentUpdate&&!1===s.shouldComponentUpdate(y,s.__s,g)||t.__v===n.__v){for(t.__v!==n.__v&&(s.props=y,s.state=s.__s,s.__d=!1),s.__e=!1,t.__e=n.__e,t.__k=n.__k,t.__k.forEach((function(e){e&&(e.__=t)})),h=0;h0&&void 0!==arguments[0]?arguments[0]:[];return{get:function(){return e},add:function(t){var n=e[e.length-1];(null==n?void 0:n.isHighlighted)===t.isHighlighted?e[e.length-1]={value:n.value+t.value,isHighlighted:n.isHighlighted}:e.push(t)}}}(n?[{value:n,isHighlighted:!1}]:[]);return t.forEach((function(e){var t=e.split(xn);r.add({value:t[0],isHighlighted:!0}),""!==t[1]&&r.add({value:t[1],isHighlighted:!1})})),r.get()}function Tn(e){return function(e){if(Array.isArray(e))return qn(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return qn(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return qn(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function qn(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n",""":'"',"'":"'"},Fn=new 
RegExp(/\w/i),Ln=/&(amp|quot|lt|gt|#39);/g,Un=RegExp(Ln.source);function Mn(e,t){var n,r,o,i=e[t],u=(null===(n=e[t+1])||void 0===n?void 0:n.isHighlighted)||!0,a=(null===(r=e[t-1])||void 0===r?void 0:r.isHighlighted)||!0;return Fn.test((o=i.value)&&Un.test(o)?o.replace(Ln,(function(e){return Rn[e]})):o)||a!==u?i.isHighlighted:a}function Hn(e){return Hn="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},Hn(e)}function Vn(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Wn(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=new Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function ur(e){return function(e){if(Array.isArray(e))return ar(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return ar(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return ar(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function ar(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n0;if(!O.value.core.openOnFocus&&!t.query)return n;var 
r=Boolean(y.current||O.value.renderer.renderNoResults);return!n&&r||n},__autocomplete_metadata:{userAgents:br,options:e}}))})),j=f(n({collections:[],completion:null,context:{},isOpen:!1,query:"",activeItemId:null,status:"idle"},O.value.core.initialState)),P={getEnvironmentProps:O.value.renderer.getEnvironmentProps,getFormProps:O.value.renderer.getFormProps,getInputProps:O.value.renderer.getInputProps,getItemProps:O.value.renderer.getItemProps,getLabelProps:O.value.renderer.getLabelProps,getListProps:O.value.renderer.getListProps,getPanelProps:O.value.renderer.getPanelProps,getRootProps:O.value.renderer.getRootProps},w={setActiveItemId:S.value.setActiveItemId,setQuery:S.value.setQuery,setCollections:S.value.setCollections,setIsOpen:S.value.setIsOpen,setStatus:S.value.setStatus,setContext:S.value.setContext,refresh:S.value.refresh,navigator:S.value.navigator},I=m((function(){return Ct.bind(O.value.renderer.renderer.createElement)})),A=m((function(){return Gt({autocomplete:S.value,autocompleteScopeApi:w,classNames:O.value.renderer.classNames,environment:O.value.core.environment,isDetached:_.value,placeholder:O.value.core.placeholder,propGetters:P,setIsModalOpen:k,state:j.current,translations:O.value.renderer.translations})}));function E(){Ht(A.value.panel,{style:_.value?{}:yr({panelPlacement:O.value.renderer.panelPlacement,container:A.value.root,form:A.value.form,environment:O.value.core.environment})})}function D(e){j.current=e;var t={autocomplete:S.value,autocompleteScopeApi:w,classNames:O.value.renderer.classNames,components:O.value.renderer.components,container:O.value.renderer.container,html:I.value,dom:A.value,panelContainer:_.value?A.value.detachedContainer:O.value.renderer.panelContainer,propGetters:P,state:j.current,renderer:O.value.renderer.renderer},r=!b(e)&&!y.current&&O.value.renderer.renderNoResults||O.value.renderer.render;!function(e){var 
t=e.autocomplete,r=e.autocompleteScopeApi,o=e.dom,i=e.propGetters,u=e.state;Vt(o.root,i.getRootProps(n({state:u,props:t.getRootProps({})},r))),Vt(o.input,i.getInputProps(n({state:u,props:t.getInputProps({inputElement:o.input}),inputElement:o.input},r))),Ht(o.label,{hidden:"stalled"===u.status}),Ht(o.loadingIndicator,{hidden:"stalled"!==u.status}),Ht(o.clearButton,{hidden:!u.query}),Ht(o.detachedSearchButtonQuery,{textContent:u.query}),Ht(o.detachedSearchButtonPlaceholder,{hidden:Boolean(u.query)})}(t),function(e,t){var r=t.autocomplete,o=t.autocompleteScopeApi,u=t.classNames,a=t.html,l=t.dom,c=t.panelContainer,s=t.propGetters,f=t.state,p=t.components,m=t.renderer;if(f.isOpen){c.contains(l.panel)||"loading"===f.status||c.appendChild(l.panel),l.panel.classList.toggle("aa-Panel--stalled","stalled"===f.status);var v=f.collections.filter((function(e){var t=e.source,n=e.items;return t.templates.noResults||n.length>0})).map((function(e,t){var l=e.source,c=e.items;return m.createElement("section",{key:t,className:u.source,"data-autocomplete-source-id":l.sourceId},l.templates.header&&m.createElement("div",{className:u.sourceHeader},l.templates.header({components:p,createElement:m.createElement,Fragment:m.Fragment,items:c,source:l,state:f,html:a})),l.templates.noResults&&0===c.length?m.createElement("div",{className:u.sourceNoResults},l.templates.noResults({components:p,createElement:m.createElement,Fragment:m.Fragment,source:l,state:f,html:a})):m.createElement("ul",i({className:u.list},s.getListProps(n({state:f,props:r.getListProps({source:l})},o))),c.map((function(e){var t=r.getItemProps({item:e,source:l});return 
m.createElement("li",i({key:t.id,className:u.item},s.getItemProps(n({state:f,props:t},o))),l.templates.item({components:p,createElement:m.createElement,Fragment:m.Fragment,item:e,state:f,html:a}))}))),l.templates.footer&&m.createElement("div",{className:u.sourceFooter},l.templates.footer({components:p,createElement:m.createElement,Fragment:m.Fragment,items:c,source:l,state:f,html:a})))})),d=m.createElement(m.Fragment,null,m.createElement("div",{className:u.panelLayout},v),m.createElement("div",{className:"aa-GradientBottom"})),y=v.reduce((function(e,t){return e[t.props["data-autocomplete-source-id"]]=t,e}),{});e(n(n({children:d,state:f,sections:v,elements:y},m),{},{components:p,html:a},o),l.panel)}else c.contains(l.panel)&&c.removeChild(l.panel)}(r,t)}function C(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};l();var t=O.value.renderer,n=t.components,r=u(t,gr);g.current=qt(r,O.value.core,{components:Bt(n,(function(e){return!e.value.hasOwnProperty("__autocomplete_componentName")})),initialState:j.current},e),v(),c(),S.value.refresh().then((function(){D(j.current)}))}function k(e){requestAnimationFrame((function(){var t=O.value.core.environment.document.body.contains(A.value.detachedOverlay);e!==t&&(e?(O.value.core.environment.document.body.appendChild(A.value.detachedOverlay),O.value.core.environment.document.body.classList.add("aa-Detached"),A.value.input.focus()):(O.value.core.environment.document.body.removeChild(A.value.detachedOverlay),O.value.core.environment.document.body.classList.remove("aa-Detached")))}))}return a((function(){var e=S.value.getEnvironmentProps({formElement:A.value.form,panelElement:A.value.panel,inputElement:A.value.input});return Ht(O.value.core.environment,e),function(){Ht(O.value.core.environment,Object.keys(e).reduce((function(e,t){return n(n({},e),{},o({},t,void 0))}),{}))}})),a((function(){var 
e=_.value?O.value.core.environment.document.body:O.value.renderer.panelContainer,t=_.value?A.value.detachedOverlay:A.value.panel;return _.value&&j.current.isOpen&&k(!0),D(j.current),function(){e.contains(t)&&e.removeChild(t)}})),a((function(){var e=O.value.renderer.container;return e.appendChild(A.value.root),function(){e.removeChild(A.value.root)}})),a((function(){var e=p((function(e){D(e.state)}),0);return h.current=function(t){var n=t.state,r=t.prevState;(_.value&&r.isOpen!==n.isOpen&&k(n.isOpen),_.value||!n.isOpen||r.isOpen||E(),n.query!==r.query)&&O.value.core.environment.document.querySelectorAll(".aa-Panel--scrollable").forEach((function(e){0!==e.scrollTop&&(e.scrollTop=0)}));e({state:n})},function(){h.current=void 0}})),a((function(){var e=p((function(){var e=_.value;_.value=O.value.core.environment.matchMedia(O.value.renderer.detachedMediaQuery).matches,e!==_.value?C({}):requestAnimationFrame(E)}),20);return O.value.core.environment.addEventListener("resize",e),function(){O.value.core.environment.removeEventListener("resize",e)}})),a((function(){if(!_.value)return function(){};function e(e){A.value.detachedContainer.classList.toggle("aa-DetachedContainer--modal",e)}function t(t){e(t.matches)}var n=O.value.core.environment.matchMedia(getComputedStyle(O.value.core.environment.document.documentElement).getPropertyValue("--aa-detached-modal-media-query"));e(n.matches);var r=Boolean(n.addEventListener);return r?n.addEventListener("change",t):n.addListener(t),function(){r?n.removeEventListener("change",t):n.removeListener(t)}})),a((function(){return requestAnimationFrame(E),function(){}})),n(n({},w),{},{update:C,destroy:function(){l()}})},e.getAlgoliaFacets=function(e){var t=hr({transformResponse:function(e){return e.facetHits}}),r=e.queries.map((function(e){return n(n({},e),{},{type:"facet"})}));return t(n(n({},e),{},{queries:r}))},e.getAlgoliaResults=Or,Object.defineProperty(e,"__esModule",{value:!0})})); + diff --git a/site_libs/quarto-search/fuse.min.js 
b/site_libs/quarto-search/fuse.min.js new file mode 100644 index 0000000..adc2835 --- /dev/null +++ b/site_libs/quarto-search/fuse.min.js @@ -0,0 +1,9 @@ +/** + * Fuse.js v6.6.2 - Lightweight fuzzy-search (http://fusejs.io) + * + * Copyright (c) 2022 Kiro Risk (http://kiro.me) + * All Rights Reserved. Apache Software License 2.0 + * + * http://www.apache.org/licenses/LICENSE-2.0 + */ +var e,t;e=this,t=function(){"use strict";function e(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function t(t){for(var n=1;ne.length)&&(t=e.length);for(var n=0,r=new Array(t);n0&&void 0!==arguments[0]?arguments[0]:1,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:3,n=new Map,r=Math.pow(10,t);return{get:function(t){var i=t.match(C).length;if(n.has(i))return n.get(i);var o=1/Math.pow(i,.5*e),c=parseFloat(Math.round(o*r)/r);return n.set(i,c),c},clear:function(){n.clear()}}}var $=function(){function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},n=t.getFn,i=void 0===n?I.getFn:n,o=t.fieldNormWeight,c=void 0===o?I.fieldNormWeight:o;r(this,e),this.norm=E(c,3),this.getFn=i,this.isCreated=!1,this.setIndexRecords()}return o(e,[{key:"setSources",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.docs=e}},{key:"setIndexRecords",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.records=e}},{key:"setKeys",value:function(){var e=this,t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];this.keys=t,this._keysMap={},t.forEach((function(t,n){e._keysMap[t.id]=n}))}},{key:"create",value:function(){var e=this;!this.isCreated&&this.docs.length&&(this.isCreated=!0,g(this.docs[0])?this.docs.forEach((function(t,n){e._addString(t,n)})):this.docs.forEach((function(t,n){e._addObject(t,n)})),this.norm.clear())}},{key:"add",value:function(e){var 
t=this.size();g(e)?this._addString(e,t):this._addObject(e,t)}},{key:"removeAt",value:function(e){this.records.splice(e,1);for(var t=e,n=this.size();t2&&void 0!==arguments[2]?arguments[2]:{},r=n.getFn,i=void 0===r?I.getFn:r,o=n.fieldNormWeight,c=void 0===o?I.fieldNormWeight:o,a=new $({getFn:i,fieldNormWeight:c});return a.setKeys(e.map(_)),a.setSources(t),a.create(),a}function R(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=t.errors,r=void 0===n?0:n,i=t.currentLocation,o=void 0===i?0:i,c=t.expectedLocation,a=void 0===c?0:c,s=t.distance,u=void 0===s?I.distance:s,h=t.ignoreLocation,l=void 0===h?I.ignoreLocation:h,f=r/e.length;if(l)return f;var d=Math.abs(a-o);return u?f+d/u:d?1:f}function N(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:I.minMatchCharLength,n=[],r=-1,i=-1,o=0,c=e.length;o=t&&n.push([r,i]),r=-1)}return e[o-1]&&o-r>=t&&n.push([r,o-1]),n}var P=32;function W(e){for(var t={},n=0,r=e.length;n1&&void 0!==arguments[1]?arguments[1]:{},o=i.location,c=void 0===o?I.location:o,a=i.threshold,s=void 0===a?I.threshold:a,u=i.distance,h=void 0===u?I.distance:u,l=i.includeMatches,f=void 0===l?I.includeMatches:l,d=i.findAllMatches,v=void 0===d?I.findAllMatches:d,g=i.minMatchCharLength,y=void 0===g?I.minMatchCharLength:g,p=i.isCaseSensitive,m=void 0===p?I.isCaseSensitive:p,k=i.ignoreLocation,M=void 0===k?I.ignoreLocation:k;if(r(this,e),this.options={location:c,threshold:s,distance:h,includeMatches:f,findAllMatches:v,minMatchCharLength:y,isCaseSensitive:m,ignoreLocation:M},this.pattern=m?t:t.toLowerCase(),this.chunks=[],this.pattern.length){var b=function(e,t){n.chunks.push({pattern:e,alphabet:W(e),startIndex:t})},x=this.pattern.length;if(x>P){for(var w=0,L=x%P,S=x-L;w3&&void 0!==arguments[3]?arguments[3]:{},i=r.location,o=void 0===i?I.location:i,c=r.distance,a=void 0===c?I.distance:c,s=r.threshold,u=void 0===s?I.threshold:s,h=r.findAllMatches,l=void 
0===h?I.findAllMatches:h,f=r.minMatchCharLength,d=void 0===f?I.minMatchCharLength:f,v=r.includeMatches,g=void 0===v?I.includeMatches:v,y=r.ignoreLocation,p=void 0===y?I.ignoreLocation:y;if(t.length>P)throw new Error(w(P));for(var m,k=t.length,M=e.length,b=Math.max(0,Math.min(o,M)),x=u,L=b,S=d>1||g,_=S?Array(M):[];(m=e.indexOf(t,L))>-1;){var O=R(t,{currentLocation:m,expectedLocation:b,distance:a,ignoreLocation:p});if(x=Math.min(O,x),L=m+k,S)for(var j=0;j=z;q-=1){var B=q-1,J=n[e.charAt(B)];if(S&&(_[B]=+!!J),K[q]=(K[q+1]<<1|1)&J,F&&(K[q]|=(A[q+1]|A[q])<<1|1|A[q+1]),K[q]&$&&(C=R(t,{errors:F,currentLocation:B,expectedLocation:b,distance:a,ignoreLocation:p}))<=x){if(x=C,(L=B)<=b)break;z=Math.max(1,2*b-L)}}if(R(t,{errors:F+1,currentLocation:b,expectedLocation:b,distance:a,ignoreLocation:p})>x)break;A=K}var U={isMatch:L>=0,score:Math.max(.001,C)};if(S){var V=N(_,d);V.length?g&&(U.indices=V):U.isMatch=!1}return U}(e,n,i,{location:c+o,distance:a,threshold:s,findAllMatches:u,minMatchCharLength:h,includeMatches:r,ignoreLocation:l}),p=y.isMatch,m=y.score,k=y.indices;p&&(g=!0),v+=m,p&&k&&(d=[].concat(f(d),f(k)))}));var y={isMatch:g,score:g?v/this.chunks.length:1};return g&&r&&(y.indices=d),y}}]),e}(),z=function(){function e(t){r(this,e),this.pattern=t}return o(e,[{key:"search",value:function(){}}],[{key:"isMultiMatch",value:function(e){return D(e,this.multiRegex)}},{key:"isSingleMatch",value:function(e){return D(e,this.singleRegex)}}]),e}();function D(e,t){var n=e.match(t);return n?n[1]:null}var K=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){var t=e===this.pattern;return{isMatch:t,score:t?0:1,indices:[0,this.pattern.length-1]}}}],[{key:"type",get:function(){return"exact"}},{key:"multiRegex",get:function(){return/^="(.*)"$/}},{key:"singleRegex",get:function(){return/^=(.*)$/}}]),n}(z),q=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return 
o(n,[{key:"search",value:function(e){var t=-1===e.indexOf(this.pattern);return{isMatch:t,score:t?0:1,indices:[0,e.length-1]}}}],[{key:"type",get:function(){return"inverse-exact"}},{key:"multiRegex",get:function(){return/^!"(.*)"$/}},{key:"singleRegex",get:function(){return/^!(.*)$/}}]),n}(z),B=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){var t=e.startsWith(this.pattern);return{isMatch:t,score:t?0:1,indices:[0,this.pattern.length-1]}}}],[{key:"type",get:function(){return"prefix-exact"}},{key:"multiRegex",get:function(){return/^\^"(.*)"$/}},{key:"singleRegex",get:function(){return/^\^(.*)$/}}]),n}(z),J=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){var t=!e.startsWith(this.pattern);return{isMatch:t,score:t?0:1,indices:[0,e.length-1]}}}],[{key:"type",get:function(){return"inverse-prefix-exact"}},{key:"multiRegex",get:function(){return/^!\^"(.*)"$/}},{key:"singleRegex",get:function(){return/^!\^(.*)$/}}]),n}(z),U=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){var t=e.endsWith(this.pattern);return{isMatch:t,score:t?0:1,indices:[e.length-this.pattern.length,e.length-1]}}}],[{key:"type",get:function(){return"suffix-exact"}},{key:"multiRegex",get:function(){return/^"(.*)"\$$/}},{key:"singleRegex",get:function(){return/^(.*)\$$/}}]),n}(z),V=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){var t=!e.endsWith(this.pattern);return{isMatch:t,score:t?0:1,indices:[0,e.length-1]}}}],[{key:"type",get:function(){return"inverse-suffix-exact"}},{key:"multiRegex",get:function(){return/^!"(.*)"\$$/}},{key:"singleRegex",get:function(){return/^!(.*)\$$/}}]),n}(z),G=function(e){a(n,e);var t=l(n);function n(e){var i,o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},c=o.location,a=void 
0===c?I.location:c,s=o.threshold,u=void 0===s?I.threshold:s,h=o.distance,l=void 0===h?I.distance:h,f=o.includeMatches,d=void 0===f?I.includeMatches:f,v=o.findAllMatches,g=void 0===v?I.findAllMatches:v,y=o.minMatchCharLength,p=void 0===y?I.minMatchCharLength:y,m=o.isCaseSensitive,k=void 0===m?I.isCaseSensitive:m,M=o.ignoreLocation,b=void 0===M?I.ignoreLocation:M;return r(this,n),(i=t.call(this,e))._bitapSearch=new T(e,{location:a,threshold:u,distance:l,includeMatches:d,findAllMatches:g,minMatchCharLength:p,isCaseSensitive:k,ignoreLocation:b}),i}return o(n,[{key:"search",value:function(e){return this._bitapSearch.searchIn(e)}}],[{key:"type",get:function(){return"fuzzy"}},{key:"multiRegex",get:function(){return/^"(.*)"$/}},{key:"singleRegex",get:function(){return/^(.*)$/}}]),n}(z),H=function(e){a(n,e);var t=l(n);function n(e){return r(this,n),t.call(this,e)}return o(n,[{key:"search",value:function(e){for(var t,n=0,r=[],i=this.pattern.length;(t=e.indexOf(this.pattern,n))>-1;)n=t+i,r.push([t,n-1]);var o=!!r.length;return{isMatch:o,score:o?0:1,indices:r}}}],[{key:"type",get:function(){return"include"}},{key:"multiRegex",get:function(){return/^'"(.*)"$/}},{key:"singleRegex",get:function(){return/^'(.*)$/}}]),n}(z),Q=[K,H,B,J,V,U,q,G],X=Q.length,Y=/ +(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)/;function Z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return e.split("|").map((function(e){for(var n=e.trim().split(Y).filter((function(e){return e&&!!e.trim()})),r=[],i=0,o=n.length;i1&&void 0!==arguments[1]?arguments[1]:{},i=n.isCaseSensitive,o=void 0===i?I.isCaseSensitive:i,c=n.includeMatches,a=void 0===c?I.includeMatches:c,s=n.minMatchCharLength,u=void 0===s?I.minMatchCharLength:s,h=n.ignoreLocation,l=void 0===h?I.ignoreLocation:h,f=n.findAllMatches,d=void 0===f?I.findAllMatches:f,v=n.location,g=void 0===v?I.location:v,y=n.threshold,p=void 0===y?I.threshold:y,m=n.distance,k=void 
0===m?I.distance:m;r(this,e),this.query=null,this.options={isCaseSensitive:o,includeMatches:a,minMatchCharLength:u,findAllMatches:d,ignoreLocation:l,location:g,threshold:p,distance:k},this.pattern=o?t:t.toLowerCase(),this.query=Z(this.pattern,this.options)}return o(e,[{key:"searchIn",value:function(e){var t=this.query;if(!t)return{isMatch:!1,score:1};var n=this.options,r=n.includeMatches;e=n.isCaseSensitive?e:e.toLowerCase();for(var i=0,o=[],c=0,a=0,s=t.length;a-1&&(n.refIndex=e.idx),t.matches.push(n)}}))}function ve(e,t){t.score=e.score}function ge(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.includeMatches,i=void 0===r?I.includeMatches:r,o=n.includeScore,c=void 0===o?I.includeScore:o,a=[];return i&&a.push(de),c&&a.push(ve),e.map((function(e){var n=e.idx,r={item:t[n],refIndex:n};return a.length&&a.forEach((function(t){t(e,r)})),r}))}var ye=function(){function e(n){var i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},o=arguments.length>2?arguments[2]:void 0;r(this,e),this.options=t(t({},I),i),this.options.useExtendedSearch,this._keyStore=new S(this.options.keys),this.setCollection(n,o)}return o(e,[{key:"setCollection",value:function(e,t){if(this._docs=e,t&&!(t instanceof $))throw new Error("Incorrect 'index' type");this._myIndex=t||F(this.options.keys,this._docs,{getFn:this.options.getFn,fieldNormWeight:this.options.fieldNormWeight})}},{key:"add",value:function(e){k(e)&&(this._docs.push(e),this._myIndex.add(e))}},{key:"remove",value:function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:function(){return!1},t=[],n=0,r=this._docs.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=t.limit,r=void 0===n?-1:n,i=this.options,o=i.includeMatches,c=i.includeScore,a=i.shouldSort,s=i.sortFn,u=i.ignoreFieldNorm,h=g(e)?g(this._docs[0])?this._searchStringList(e):this._searchObjectList(e):this._searchLogical(e);return 
fe(h,{ignoreFieldNorm:u}),a&&h.sort(s),y(r)&&r>-1&&(h=h.slice(0,r)),ge(h,this._docs,{includeMatches:o,includeScore:c})}},{key:"_searchStringList",value:function(e){var t=re(e,this.options),n=this._myIndex.records,r=[];return n.forEach((function(e){var n=e.v,i=e.i,o=e.n;if(k(n)){var c=t.searchIn(n),a=c.isMatch,s=c.score,u=c.indices;a&&r.push({item:n,idx:i,matches:[{score:s,value:n,norm:o,indices:u}]})}})),r}},{key:"_searchLogical",value:function(e){var t=this,n=function(e,t){var n=(arguments.length>2&&void 0!==arguments[2]?arguments[2]:{}).auto,r=void 0===n||n,i=function e(n){var i=Object.keys(n),o=ue(n);if(!o&&i.length>1&&!se(n))return e(le(n));if(he(n)){var c=o?n[ce]:i[0],a=o?n[ae]:n[c];if(!g(a))throw new Error(x(c));var s={keyId:j(c),pattern:a};return r&&(s.searcher=re(a,t)),s}var u={children:[],operator:i[0]};return i.forEach((function(t){var r=n[t];v(r)&&r.forEach((function(t){u.children.push(e(t))}))})),u};return se(e)||(e=le(e)),i(e)}(e,this.options),r=function e(n,r,i){if(!n.children){var o=n.keyId,c=n.searcher,a=t._findMatches({key:t._keyStore.get(o),value:t._myIndex.getValueForItemAtKeyId(r,o),searcher:c});return a&&a.length?[{idx:i,item:r,matches:a}]:[]}for(var s=[],u=0,h=n.children.length;u1&&void 0!==arguments[1]?arguments[1]:{},n=t.getFn,r=void 0===n?I.getFn:n,i=t.fieldNormWeight,o=void 0===i?I.fieldNormWeight:i,c=e.keys,a=e.records,s=new $({getFn:r,fieldNormWeight:o});return s.setKeys(c),s.setIndexRecords(a),s},ye.config=I,function(){ne.push.apply(ne,arguments)}(te),ye},"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).Fuse=t(); \ No newline at end of file diff --git a/site_libs/quarto-search/quarto-search.js b/site_libs/quarto-search/quarto-search.js new file mode 100644 index 0000000..5f723d7 --- /dev/null +++ b/site_libs/quarto-search/quarto-search.js @@ -0,0 +1,1286 @@ +const kQueryArg = "q"; +const kResultsArg = 
"show-results"; + +// If items don't provide a URL, then both the navigator and the onSelect +// function aren't called (and therefore, the default implementation is used) +// +// We're using this sentinel URL to signal to those handlers that this +// item is a more item (along with the type) and can be handled appropriately +const kItemTypeMoreHref = "0767FDFD-0422-4E5A-BC8A-3BE11E5BBA05"; + +window.document.addEventListener("DOMContentLoaded", function (_event) { + // Ensure that search is available on this page. If it isn't, + // should return early and not do anything + var searchEl = window.document.getElementById("quarto-search"); + if (!searchEl) return; + + const { autocomplete } = window["@algolia/autocomplete-js"]; + + let quartoSearchOptions = {}; + let language = {}; + const searchOptionEl = window.document.getElementById( + "quarto-search-options" + ); + if (searchOptionEl) { + const jsonStr = searchOptionEl.textContent; + quartoSearchOptions = JSON.parse(jsonStr); + language = quartoSearchOptions.language; + } + + // note the search mode + if (quartoSearchOptions.type === "overlay") { + searchEl.classList.add("type-overlay"); + } else { + searchEl.classList.add("type-textbox"); + } + + // Used to determine highlighting behavior for this page + // A `q` query param is expected when the user follows a search + // to this page + const currentUrl = new URL(window.location); + const query = currentUrl.searchParams.get(kQueryArg); + const showSearchResults = currentUrl.searchParams.get(kResultsArg); + const mainEl = window.document.querySelector("main"); + + // highlight matches on the page + if (query && mainEl) { + // perform any highlighting + highlight(escapeRegExp(query), mainEl); + + // fix up the URL to remove the q query param + const replacementUrl = new URL(window.location); + replacementUrl.searchParams.delete(kQueryArg); + window.history.replaceState({}, "", replacementUrl); + } + + // function to clear highlighting on the page when the search 
query changes + // (e.g. if the user edits the query or clears it) + let highlighting = true; + const resetHighlighting = (searchTerm) => { + if (mainEl && highlighting && query && searchTerm !== query) { + clearHighlight(query, mainEl); + highlighting = false; + } + }; + + // Clear search highlighting when the user scrolls sufficiently + const resetFn = () => { + resetHighlighting(""); + window.removeEventListener("quarto-hrChanged", resetFn); + window.removeEventListener("quarto-sectionChanged", resetFn); + }; + + // Register this event after the initial scrolling and settling of events + // on the page + window.addEventListener("quarto-hrChanged", resetFn); + window.addEventListener("quarto-sectionChanged", resetFn); + + // Responsively switch to overlay mode if the search is present on the navbar + // Note that switching the sidebar to overlay mode requires more coordinate (not just + // the media query since we generate different HTML for sidebar overlays than we do + // for sidebar input UI) + const detachedMediaQuery = + quartoSearchOptions.type === "overlay" ? 
"all" : "(max-width: 991px)"; + + // If configured, include the analytics client to send insights + const plugins = configurePlugins(quartoSearchOptions); + + let lastState = null; + const { setIsOpen, setQuery, setCollections } = autocomplete({ + container: searchEl, + detachedMediaQuery: detachedMediaQuery, + defaultActiveItemId: 0, + panelContainer: "#quarto-search-results", + panelPlacement: quartoSearchOptions["panel-placement"], + debug: false, + openOnFocus: true, + plugins, + classNames: { + form: "d-flex", + }, + placeholder: language["search-text-placeholder"], + translations: { + clearButtonTitle: language["search-clear-button-title"], + detachedCancelButtonText: language["search-detached-cancel-button-title"], + submitButtonTitle: language["search-submit-button-title"], + }, + initialState: { + query, + }, + getItemUrl({ item }) { + return item.href; + }, + onStateChange({ state }) { + // If this is a file URL, note that + + // Perhaps reset highlighting + resetHighlighting(state.query); + + // If the panel just opened, ensure the panel is positioned properly + if (state.isOpen) { + if (lastState && !lastState.isOpen) { + setTimeout(() => { + positionPanel(quartoSearchOptions["panel-placement"]); + }, 150); + } + } + + // Perhaps show the copy link + showCopyLink(state.query, quartoSearchOptions); + + lastState = state; + }, + reshape({ sources, state }) { + return sources.map((source) => { + try { + const items = source.getItems(); + + // Validate the items + validateItems(items); + + // group the items by document + const groupedItems = new Map(); + items.forEach((item) => { + const hrefParts = item.href.split("#"); + const baseHref = hrefParts[0]; + const isDocumentItem = hrefParts.length === 1; + + const items = groupedItems.get(baseHref); + if (!items) { + groupedItems.set(baseHref, [item]); + } else { + // If the href for this item matches the document + // exactly, place this item first as it is the item that represents + // the document itself + 
if (isDocumentItem) { + items.unshift(item); + } else { + items.push(item); + } + groupedItems.set(baseHref, items); + } + }); + + const reshapedItems = []; + let count = 1; + for (const [_key, value] of groupedItems) { + const firstItem = value[0]; + reshapedItems.push({ + ...firstItem, + type: kItemTypeDoc, + }); + + const collapseMatches = quartoSearchOptions["collapse-after"]; + const collapseCount = + typeof collapseMatches === "number" ? collapseMatches : 1; + + if (value.length > 1) { + const target = `search-more-${count}`; + const isExpanded = + state.context.expanded && + state.context.expanded.includes(target); + + const remainingCount = value.length - collapseCount; + + for (let i = 1; i < value.length; i++) { + if (collapseMatches && i === collapseCount) { + reshapedItems.push({ + target, + title: isExpanded + ? language["search-hide-matches-text"] + : remainingCount === 1 + ? `${remainingCount} ${language["search-more-match-text"]}` + : `${remainingCount} ${language["search-more-matches-text"]}`, + type: kItemTypeMore, + href: kItemTypeMoreHref, + }); + } + + if (isExpanded || !collapseMatches || i < collapseCount) { + reshapedItems.push({ + ...value[i], + type: kItemTypeItem, + target, + }); + } + } + } + count += 1; + } + + return { + ...source, + getItems() { + return reshapedItems; + }, + }; + } catch (error) { + // Some form of error occurred + return { + ...source, + getItems() { + return [ + { + title: error.name || "An Error Occurred While Searching", + text: + error.message || + "An unknown error occurred while attempting to perform the requested search.", + type: kItemTypeError, + }, + ]; + }, + }; + } + }); + }, + navigator: { + navigate({ itemUrl }) { + if (itemUrl !== offsetURL(kItemTypeMoreHref)) { + window.location.assign(itemUrl); + } + }, + navigateNewTab({ itemUrl }) { + if (itemUrl !== offsetURL(kItemTypeMoreHref)) { + const windowReference = window.open(itemUrl, "_blank", "noopener"); + if (windowReference) { + 
windowReference.focus(); + } + } + }, + navigateNewWindow({ itemUrl }) { + if (itemUrl !== offsetURL(kItemTypeMoreHref)) { + window.open(itemUrl, "_blank", "noopener"); + } + }, + }, + getSources({ state, setContext, setActiveItemId, refresh }) { + return [ + { + sourceId: "documents", + getItemUrl({ item }) { + if (item.href) { + return offsetURL(item.href); + } else { + return undefined; + } + }, + onSelect({ + item, + state, + setContext, + setIsOpen, + setActiveItemId, + refresh, + }) { + if (item.type === kItemTypeMore) { + toggleExpanded(item, state, setContext, setActiveItemId, refresh); + + // Toggle more + setIsOpen(true); + } + }, + getItems({ query }) { + if (query === null || query === "") { + return []; + } + + const limit = quartoSearchOptions.limit; + if (quartoSearchOptions.algolia) { + return algoliaSearch(query, limit, quartoSearchOptions.algolia); + } else { + // Fuse search options + const fuseSearchOptions = { + isCaseSensitive: false, + shouldSort: true, + minMatchCharLength: 2, + limit: limit, + }; + + return readSearchData().then(function (fuse) { + return fuseSearch(query, fuse, fuseSearchOptions); + }); + } + }, + templates: { + noResults({ createElement }) { + const hasQuery = lastState.query; + + return createElement( + "div", + { + class: `quarto-search-no-results${ + hasQuery ? 
"" : " no-query" + }`, + }, + language["search-no-results-text"] + ); + }, + header({ items, createElement }) { + // count the documents + const count = items.filter((item) => { + return item.type === kItemTypeDoc; + }).length; + + if (count > 0) { + return createElement( + "div", + { class: "search-result-header" }, + `${count} ${language["search-matching-documents-text"]}` + ); + } else { + return createElement( + "div", + { class: "search-result-header-no-results" }, + `` + ); + } + }, + footer({ _items, createElement }) { + if ( + quartoSearchOptions.algolia && + quartoSearchOptions.algolia["show-logo"] + ) { + const libDir = quartoSearchOptions.algolia["libDir"]; + const logo = createElement("img", { + src: offsetURL( + `${libDir}/quarto-search/search-by-algolia.svg` + ), + class: "algolia-search-logo", + }); + return createElement( + "a", + { href: "http://www.algolia.com/" }, + logo + ); + } + }, + + item({ item, createElement }) { + return renderItem( + item, + createElement, + state, + setActiveItemId, + setContext, + refresh, + quartoSearchOptions + ); + }, + }, + }, + ]; + }, + }); + + window.quartoOpenSearch = () => { + setIsOpen(false); + setIsOpen(true); + focusSearchInput(); + }; + + document.addEventListener("keyup", (event) => { + const { key } = event; + const kbds = quartoSearchOptions["keyboard-shortcut"]; + const focusedEl = document.activeElement; + + const isFormElFocused = [ + "input", + "select", + "textarea", + "button", + "option", + ].find((tag) => { + return focusedEl.tagName.toLowerCase() === tag; + }); + + if ( + kbds && + kbds.includes(key) && + !isFormElFocused && + !document.activeElement.isContentEditable + ) { + event.preventDefault(); + window.quartoOpenSearch(); + } + }); + + // Remove the labeleledby attribute since it is pointing + // to a non-existent label + if (quartoSearchOptions.type === "overlay") { + const inputEl = window.document.querySelector( + "#quarto-search .aa-Autocomplete" + ); + if (inputEl) { + 
inputEl.removeAttribute("aria-labelledby"); + } + } + + function throttle(func, wait) { + let waiting = false; + return function () { + if (!waiting) { + func.apply(this, arguments); + waiting = true; + setTimeout(function () { + waiting = false; + }, wait); + } + }; + } + + // If the main document scrolls dismiss the search results + // (otherwise, since they're floating in the document they can scroll with the document) + window.document.body.onscroll = throttle(() => { + // Only do this if we're not detached + // Bug #7117 + // This will happen when the keyboard is shown on ios (resulting in a scroll) + // which then closed the search UI + if (!window.matchMedia(detachedMediaQuery).matches) { + setIsOpen(false); + } + }, 50); + + if (showSearchResults) { + setIsOpen(true); + focusSearchInput(); + } +}); + +function configurePlugins(quartoSearchOptions) { + const autocompletePlugins = []; + const algoliaOptions = quartoSearchOptions.algolia; + if ( + algoliaOptions && + algoliaOptions["analytics-events"] && + algoliaOptions["search-only-api-key"] && + algoliaOptions["application-id"] + ) { + const apiKey = algoliaOptions["search-only-api-key"]; + const appId = algoliaOptions["application-id"]; + + // Aloglia insights may not be loaded because they require cookie consent + // Use deferred loading so events will start being recorded when/if consent + // is granted. 
+ const algoliaInsightsDeferredPlugin = deferredLoadPlugin(() => { + if ( + window.aa && + window["@algolia/autocomplete-plugin-algolia-insights"] + ) { + window.aa("init", { + appId, + apiKey, + useCookie: true, + }); + + const { createAlgoliaInsightsPlugin } = + window["@algolia/autocomplete-plugin-algolia-insights"]; + // Register the insights client + const algoliaInsightsPlugin = createAlgoliaInsightsPlugin({ + insightsClient: window.aa, + onItemsChange({ insights, insightsEvents }) { + const events = insightsEvents.flatMap((event) => { + // This API limits the number of items per event to 20 + const chunkSize = 20; + const itemChunks = []; + const eventItems = event.items; + for (let i = 0; i < eventItems.length; i += chunkSize) { + itemChunks.push(eventItems.slice(i, i + chunkSize)); + } + // Split the items into multiple events that can be sent + const events = itemChunks.map((items) => { + return { + ...event, + items, + }; + }); + return events; + }); + + for (const event of events) { + insights.viewedObjectIDs(event); + } + }, + }); + return algoliaInsightsPlugin; + } + }); + + // Add the plugin + autocompletePlugins.push(algoliaInsightsDeferredPlugin); + return autocompletePlugins; + } +} + +// For plugins that may not load immediately, create a wrapper +// plugin and forward events and plugin data once the plugin +// is initialized. This is useful for cases like cookie consent +// which may prevent the analytics insights event plugin from initializing +// immediately. 
+function deferredLoadPlugin(createPlugin) { + let plugin = undefined; + let subscribeObj = undefined; + const wrappedPlugin = () => { + if (!plugin && subscribeObj) { + plugin = createPlugin(); + if (plugin && plugin.subscribe) { + plugin.subscribe(subscribeObj); + } + } + return plugin; + }; + + return { + subscribe: (obj) => { + subscribeObj = obj; + }, + onStateChange: (obj) => { + const plugin = wrappedPlugin(); + if (plugin && plugin.onStateChange) { + plugin.onStateChange(obj); + } + }, + onSubmit: (obj) => { + const plugin = wrappedPlugin(); + if (plugin && plugin.onSubmit) { + plugin.onSubmit(obj); + } + }, + onReset: (obj) => { + const plugin = wrappedPlugin(); + if (plugin && plugin.onReset) { + plugin.onReset(obj); + } + }, + getSources: (obj) => { + const plugin = wrappedPlugin(); + if (plugin && plugin.getSources) { + return plugin.getSources(obj); + } else { + return Promise.resolve([]); + } + }, + data: (obj) => { + const plugin = wrappedPlugin(); + if (plugin && plugin.data) { + plugin.data(obj); + } + }, + }; +} + +function validateItems(items) { + // Validate the first item + if (items.length > 0) { + const item = items[0]; + const missingFields = []; + if (item.href == undefined) { + missingFields.push("href"); + } + if (!item.title == undefined) { + missingFields.push("title"); + } + if (!item.text == undefined) { + missingFields.push("text"); + } + + if (missingFields.length === 1) { + throw { + name: `Error: Search index is missing the ${missingFields[0]} field.`, + message: `The items being returned for this search do not include all the required fields. 
Please ensure that your index items include the ${missingFields[0]} field or use index-fields in your _quarto.yml file to specify the field names.`, + }; + } else if (missingFields.length > 1) { + const missingFieldList = missingFields + .map((field) => { + return `${field}`; + }) + .join(", "); + + throw { + name: `Error: Search index is missing the following fields: ${missingFieldList}.`, + message: `The items being returned for this search do not include all the required fields. Please ensure that your index items includes the following fields: ${missingFieldList}, or use index-fields in your _quarto.yml file to specify the field names.`, + }; + } + } +} + +let lastQuery = null; +function showCopyLink(query, options) { + const language = options.language; + lastQuery = query; + // Insert share icon + const inputSuffixEl = window.document.body.querySelector( + ".aa-Form .aa-InputWrapperSuffix" + ); + + if (inputSuffixEl) { + let copyButtonEl = window.document.body.querySelector( + ".aa-Form .aa-InputWrapperSuffix .aa-CopyButton" + ); + + if (copyButtonEl === null) { + copyButtonEl = window.document.createElement("button"); + copyButtonEl.setAttribute("class", "aa-CopyButton"); + copyButtonEl.setAttribute("type", "button"); + copyButtonEl.setAttribute("title", language["search-copy-link-title"]); + copyButtonEl.onmousedown = (e) => { + e.preventDefault(); + e.stopPropagation(); + }; + + const linkIcon = "bi-clipboard"; + const checkIcon = "bi-check2"; + + const shareIconEl = window.document.createElement("i"); + shareIconEl.setAttribute("class", `bi ${linkIcon}`); + copyButtonEl.appendChild(shareIconEl); + inputSuffixEl.prepend(copyButtonEl); + + const clipboard = new window.ClipboardJS(".aa-CopyButton", { + text: function (_trigger) { + const copyUrl = new URL(window.location); + copyUrl.searchParams.set(kQueryArg, lastQuery); + copyUrl.searchParams.set(kResultsArg, "1"); + return copyUrl.toString(); + }, + }); + clipboard.on("success", function (e) { + // Focus 
the input + + // button target + const button = e.trigger; + const icon = button.querySelector("i.bi"); + + // flash "checked" + icon.classList.add(checkIcon); + icon.classList.remove(linkIcon); + setTimeout(function () { + icon.classList.remove(checkIcon); + icon.classList.add(linkIcon); + }, 1000); + }); + } + + // If there is a query, show the link icon + if (copyButtonEl) { + if (lastQuery && options["copy-button"]) { + copyButtonEl.style.display = "flex"; + } else { + copyButtonEl.style.display = "none"; + } + } + } +} + +/* Search Index Handling */ +// create the index +var fuseIndex = undefined; +var shownWarning = false; + +// fuse index options +const kFuseIndexOptions = { + keys: [ + { name: "title", weight: 20 }, + { name: "section", weight: 20 }, + { name: "text", weight: 10 }, + ], + ignoreLocation: true, + threshold: 0.1, +}; + +async function readSearchData() { + // Initialize the search index on demand + if (fuseIndex === undefined) { + if (window.location.protocol === "file:" && !shownWarning) { + window.alert( + "Search requires JavaScript features disabled when running in file://... URLs. In order to use search, please run this document in a web server." 
+ ); + shownWarning = true; + return; + } + const fuse = new window.Fuse([], kFuseIndexOptions); + + // fetch the main search.json + const response = await fetch(offsetURL("search.json")); + if (response.status == 200) { + return response.json().then(function (searchDocs) { + searchDocs.forEach(function (searchDoc) { + fuse.add(searchDoc); + }); + fuseIndex = fuse; + return fuseIndex; + }); + } else { + return Promise.reject( + new Error( + "Unexpected status from search index request: " + response.status + ) + ); + } + } + + return fuseIndex; +} + +function inputElement() { + return window.document.body.querySelector(".aa-Form .aa-Input"); +} + +function focusSearchInput() { + setTimeout(() => { + const inputEl = inputElement(); + if (inputEl) { + inputEl.focus(); + } + }, 50); +} + +/* Panels */ +const kItemTypeDoc = "document"; +const kItemTypeMore = "document-more"; +const kItemTypeItem = "document-item"; +const kItemTypeError = "error"; + +function renderItem( + item, + createElement, + state, + setActiveItemId, + setContext, + refresh, + quartoSearchOptions +) { + switch (item.type) { + case kItemTypeDoc: + return createDocumentCard( + createElement, + "file-richtext", + item.title, + item.section, + item.text, + item.href, + item.crumbs, + quartoSearchOptions + ); + case kItemTypeMore: + return createMoreCard( + createElement, + item, + state, + setActiveItemId, + setContext, + refresh + ); + case kItemTypeItem: + return createSectionCard( + createElement, + item.section, + item.text, + item.href + ); + case kItemTypeError: + return createErrorCard(createElement, item.title, item.text); + default: + return undefined; + } +} + +function createDocumentCard( + createElement, + icon, + title, + section, + text, + href, + crumbs, + quartoSearchOptions +) { + const iconEl = createElement("i", { + class: `bi bi-${icon} search-result-icon`, + }); + const titleEl = createElement("p", { class: "search-result-title" }, title); + const titleContents = [iconEl, titleEl]; 
+ const showParent = quartoSearchOptions["show-item-context"]; + if (crumbs && showParent) { + let crumbsOut = undefined; + const crumbClz = ["search-result-crumbs"]; + if (showParent === "root") { + crumbsOut = crumbs.length > 1 ? crumbs[0] : undefined; + } else if (showParent === "parent") { + crumbsOut = crumbs.length > 1 ? crumbs[crumbs.length - 2] : undefined; + } else { + crumbsOut = crumbs.length > 1 ? crumbs.join(" > ") : undefined; + crumbClz.push("search-result-crumbs-wrap"); + } + + const crumbEl = createElement( + "p", + { class: crumbClz.join(" ") }, + crumbsOut + ); + titleContents.push(crumbEl); + } + + const titleContainerEl = createElement( + "div", + { class: "search-result-title-container" }, + titleContents + ); + + const textEls = []; + if (section) { + const sectionEl = createElement( + "p", + { class: "search-result-section" }, + section + ); + textEls.push(sectionEl); + } + const descEl = createElement("p", { + class: "search-result-text", + dangerouslySetInnerHTML: { + __html: text, + }, + }); + textEls.push(descEl); + + const textContainerEl = createElement( + "div", + { class: "search-result-text-container" }, + textEls + ); + + const containerEl = createElement( + "div", + { + class: "search-result-container", + }, + [titleContainerEl, textContainerEl] + ); + + const linkEl = createElement( + "a", + { + href: offsetURL(href), + class: "search-result-link", + }, + containerEl + ); + + const classes = ["search-result-doc", "search-item"]; + if (!section) { + classes.push("document-selectable"); + } + + return createElement( + "div", + { + class: classes.join(" "), + }, + linkEl + ); +} + +function createMoreCard( + createElement, + item, + state, + setActiveItemId, + setContext, + refresh +) { + const moreCardEl = createElement( + "div", + { + class: "search-result-more search-item", + onClick: (e) => { + // Handle expanding the sections by adding the expanded + // section to the list of expanded sections + toggleExpanded(item, state, 
setContext, setActiveItemId, refresh); + e.stopPropagation(); + }, + }, + item.title + ); + + return moreCardEl; +} + +function toggleExpanded(item, state, setContext, setActiveItemId, refresh) { + const expanded = state.context.expanded || []; + if (expanded.includes(item.target)) { + setContext({ + expanded: expanded.filter((target) => target !== item.target), + }); + } else { + setContext({ expanded: [...expanded, item.target] }); + } + + refresh(); + setActiveItemId(item.__autocomplete_id); +} + +function createSectionCard(createElement, section, text, href) { + const sectionEl = createSection(createElement, section, text, href); + return createElement( + "div", + { + class: "search-result-doc-section search-item", + }, + sectionEl + ); +} + +function createSection(createElement, title, text, href) { + const descEl = createElement("p", { + class: "search-result-text", + dangerouslySetInnerHTML: { + __html: text, + }, + }); + + const titleEl = createElement("p", { class: "search-result-section" }, title); + const linkEl = createElement( + "a", + { + href: offsetURL(href), + class: "search-result-link", + }, + [titleEl, descEl] + ); + return linkEl; +} + +function createErrorCard(createElement, title, text) { + const descEl = createElement("p", { + class: "search-error-text", + dangerouslySetInnerHTML: { + __html: text, + }, + }); + + const titleEl = createElement("p", { + class: "search-error-title", + dangerouslySetInnerHTML: { + __html: ` ${title}`, + }, + }); + const errorEl = createElement("div", { class: "search-error" }, [ + titleEl, + descEl, + ]); + return errorEl; +} + +function positionPanel(pos) { + const panelEl = window.document.querySelector( + "#quarto-search-results .aa-Panel" + ); + const inputEl = window.document.querySelector( + "#quarto-search .aa-Autocomplete" + ); + + if (panelEl && inputEl) { + panelEl.style.top = `${Math.round(panelEl.offsetTop)}px`; + if (pos === "start") { + panelEl.style.left = `${Math.round(inputEl.left)}px`; + } else 
{ + panelEl.style.right = `${Math.round(inputEl.offsetRight)}px`; + } + } +} + +/* Highlighting */ +// highlighting functions +function highlightMatch(query, text) { + if (text) { + const start = text.toLowerCase().indexOf(query.toLowerCase()); + if (start !== -1) { + const startMark = ""; + const endMark = ""; + + const end = start + query.length; + text = + text.slice(0, start) + + startMark + + text.slice(start, end) + + endMark + + text.slice(end); + const startInfo = clipStart(text, start); + const endInfo = clipEnd( + text, + startInfo.position + startMark.length + endMark.length + ); + text = + startInfo.prefix + + text.slice(startInfo.position, endInfo.position) + + endInfo.suffix; + + return text; + } else { + return text; + } + } else { + return text; + } +} + +function clipStart(text, pos) { + const clipStart = pos - 50; + if (clipStart < 0) { + // This will just return the start of the string + return { + position: 0, + prefix: "", + }; + } else { + // We're clipping before the start of the string, walk backwards to the first space. + const spacePos = findSpace(text, pos, -1); + return { + position: spacePos.position, + prefix: "", + }; + } +} + +function clipEnd(text, pos) { + const clipEnd = pos + 200; + if (clipEnd > text.length) { + return { + position: text.length, + suffix: "", + }; + } else { + const spacePos = findSpace(text, clipEnd, 1); + return { + position: spacePos.position, + suffix: spacePos.clipped ? "…" : "", + }; + } +} + +function findSpace(text, start, step) { + let stepPos = start; + while (stepPos > -1 && stepPos < text.length) { + const char = text[stepPos]; + if (char === " " || char === "," || char === ":") { + return { + position: step === 1 ? 
stepPos : stepPos - step, + clipped: stepPos > 1 && stepPos < text.length, + }; + } + stepPos = stepPos + step; + } + + return { + position: stepPos - step, + clipped: false, + }; +} + +// removes highlighting as implemented by the mark tag +function clearHighlight(searchterm, el) { + const childNodes = el.childNodes; + for (let i = childNodes.length - 1; i >= 0; i--) { + const node = childNodes[i]; + if (node.nodeType === Node.ELEMENT_NODE) { + if ( + node.tagName === "MARK" && + node.innerText.toLowerCase() === searchterm.toLowerCase() + ) { + el.replaceChild(document.createTextNode(node.innerText), node); + } else { + clearHighlight(searchterm, node); + } + } + } +} + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string +} + +// highlight matches +function highlight(term, el) { + const termRegex = new RegExp(term, "ig"); + const childNodes = el.childNodes; + + // walk back to front avoid mutating elements in front of us + for (let i = childNodes.length - 1; i >= 0; i--) { + const node = childNodes[i]; + + if (node.nodeType === Node.TEXT_NODE) { + // Search text nodes for text to highlight + const text = node.nodeValue; + + let startIndex = 0; + let matchIndex = text.search(termRegex); + if (matchIndex > -1) { + const markFragment = document.createDocumentFragment(); + while (matchIndex > -1) { + const prefix = text.slice(startIndex, matchIndex); + markFragment.appendChild(document.createTextNode(prefix)); + + const mark = document.createElement("mark"); + mark.appendChild( + document.createTextNode( + text.slice(matchIndex, matchIndex + term.length) + ) + ); + markFragment.appendChild(mark); + + startIndex = matchIndex + term.length; + matchIndex = text.slice(startIndex).search(new RegExp(term, "ig")); + if (matchIndex > -1) { + matchIndex = startIndex + matchIndex; + } + } + if (startIndex < text.length) { + markFragment.appendChild( + document.createTextNode(text.slice(startIndex, 
text.length)) + ); + } + + el.replaceChild(markFragment, node); + } + } else if (node.nodeType === Node.ELEMENT_NODE) { + // recurse through elements + highlight(term, node); + } + } +} + +/* Link Handling */ +// get the offset from this page for a given site root relative url +function offsetURL(url) { + var offset = getMeta("quarto:offset"); + return offset ? offset + url : url; +} + +// read a meta tag value +function getMeta(metaName) { + var metas = window.document.getElementsByTagName("meta"); + for (let i = 0; i < metas.length; i++) { + if (metas[i].getAttribute("name") === metaName) { + return metas[i].getAttribute("content"); + } + } + return ""; +} + +function algoliaSearch(query, limit, algoliaOptions) { + const { getAlgoliaResults } = window["@algolia/autocomplete-preset-algolia"]; + + const applicationId = algoliaOptions["application-id"]; + const searchOnlyApiKey = algoliaOptions["search-only-api-key"]; + const indexName = algoliaOptions["index-name"]; + const indexFields = algoliaOptions["index-fields"]; + const searchClient = window.algoliasearch(applicationId, searchOnlyApiKey); + const searchParams = algoliaOptions["params"]; + const searchAnalytics = !!algoliaOptions["analytics-events"]; + + return getAlgoliaResults({ + searchClient, + queries: [ + { + indexName: indexName, + query, + params: { + hitsPerPage: limit, + clickAnalytics: searchAnalytics, + ...searchParams, + }, + }, + ], + transformResponse: (response) => { + if (!indexFields) { + return response.hits.map((hit) => { + return hit.map((item) => { + return { + ...item, + text: highlightMatch(query, item.text), + }; + }); + }); + } else { + const remappedHits = response.hits.map((hit) => { + return hit.map((item) => { + const newItem = { ...item }; + ["href", "section", "title", "text", "crumbs"].forEach( + (keyName) => { + const mappedName = indexFields[keyName]; + if ( + mappedName && + item[mappedName] !== undefined && + mappedName !== keyName + ) { + newItem[keyName] = 
item[mappedName]; + delete newItem[mappedName]; + } + } + ); + newItem.text = highlightMatch(query, newItem.text); + return newItem; + }); + }); + return remappedHits; + } + }, + }); +} + +let subSearchTerm = undefined; +let subSearchFuse = undefined; +const kFuseMaxWait = 125; + +async function fuseSearch(query, fuse, fuseOptions) { + let index = fuse; + // Fuse.js using the Bitap algorithm for text matching which runs in + // O(nm) time (no matter the structure of the text). In our case this + // means that long search terms mixed with large index gets very slow + // + // This injects a subIndex that will be used once the terms get long enough + // Usually making this subindex is cheap since there will typically be + // a subset of results matching the existing query + if (subSearchFuse !== undefined && query.startsWith(subSearchTerm)) { + // Use the existing subSearchFuse + index = subSearchFuse; + } else if (subSearchFuse !== undefined) { + // The term changed, discard the existing fuse + subSearchFuse = undefined; + subSearchTerm = undefined; + } + + // Search using the active fuse + const then = performance.now(); + const resultsRaw = await index.search(query, fuseOptions); + const now = performance.now(); + + const results = resultsRaw.map((result) => { + const addParam = (url, name, value) => { + const anchorParts = url.split("#"); + const baseUrl = anchorParts[0]; + const sep = baseUrl.search("\\?") > 0 ? 
"&" : "?"; + anchorParts[0] = baseUrl + sep + name + "=" + value; + return anchorParts.join("#"); + }; + + return { + title: result.item.title, + section: result.item.section, + href: addParam(result.item.href, kQueryArg, query), + text: highlightMatch(query, result.item.text), + crumbs: result.item.crumbs, + }; + }); + + // If we don't have a subfuse and the query is long enough, go ahead + // and create a subfuse to use for subsequent queries + if (now - then > kFuseMaxWait && subSearchFuse === undefined) { + subSearchTerm = query; + subSearchFuse = new window.Fuse([], kFuseIndexOptions); + resultsRaw.forEach((rr) => { + subSearchFuse.add(rr.item); + }); + } + return results; +} diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 0000000..30c42f1 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,111 @@ + + + + https://collabora.github.io/WhisperSpeech/6. Quality-boosting vocoder.html + 2024-04-09T09:51:07.268Z + + + https://collabora.github.io/WhisperSpeech/1c. vad merging.html + 2024-04-09T09:51:07.212Z + + + https://collabora.github.io/WhisperSpeech/2A. Whisper quantization dataset preparation.html + 2024-04-09T09:51:07.180Z + + + https://collabora.github.io/WhisperSpeech/3c. s2a acoustic tokens preparation.html + 2024-04-09T09:51:03.604Z + + + https://collabora.github.io/WhisperSpeech/C2. Testing.html + 2024-04-09T09:51:03.488Z + + + https://collabora.github.io/WhisperSpeech/index.html + 2024-04-09T09:51:03.072Z + + + https://collabora.github.io/WhisperSpeech/B1. Training.html + 2024-04-09T09:51:02.964Z + + + https://collabora.github.io/WhisperSpeech/3a. t2s transcripts preparation.html + 2024-04-09T09:51:06.532Z + + + https://collabora.github.io/WhisperSpeech/D. Common inference utilities.html + 2024-04-09T09:51:01.452Z + + + https://collabora.github.io/WhisperSpeech/b. languages.html + 2024-04-09T09:50:59.904Z + + + https://collabora.github.io/WhisperSpeech/1b. 
voice activity detection.html + 2024-04-09T09:50:59.856Z + + + https://collabora.github.io/WhisperSpeech/5b. multi-lang text to semantic token modeling.html + 2024-04-09T09:51:03.424Z + + + https://collabora.github.io/WhisperSpeech/dataset preparation.html + 2024-04-09T09:50:59.812Z + + + https://collabora.github.io/WhisperSpeech/c. benchmark.html + 2024-04-09T09:50:59.768Z + + + https://collabora.github.io/WhisperSpeech/0. download models.html + 2024-04-09T09:50:59.812Z + + + https://collabora.github.io/WhisperSpeech/1. acoustic token extraction.html + 2024-04-09T09:51:01.044Z + + + https://collabora.github.io/WhisperSpeech/3D. Split out validation.html + 2024-04-09T09:50:59.844Z + + + https://collabora.github.io/WhisperSpeech/B2. Training (Lightning).html + 2024-04-09T09:50:59.884Z + + + https://collabora.github.io/WhisperSpeech/3b. semantic token extraction.html + 2024-04-09T09:51:04.076Z + + + https://collabora.github.io/WhisperSpeech/D. Common dataset utilities.html + 2024-04-09T09:51:01.172Z + + + https://collabora.github.io/WhisperSpeech/7. Pipeline.html + 2024-04-09T09:51:01.720Z + + + https://collabora.github.io/WhisperSpeech/2c. whisper quantization (semantic token) evaluation.html + 2024-04-09T09:51:03.032Z + + + https://collabora.github.io/WhisperSpeech/4b. multi-language semantic to acoustic token modeling.html + 2024-04-09T09:51:03.428Z + + + https://collabora.github.io/WhisperSpeech/2A. Speaker Embeddings.html + 2024-04-09T09:51:07.456Z + + + https://collabora.github.io/WhisperSpeech/2b. whisper quantization (semantic token) model.html + 2024-04-09T09:51:07.560Z + + + https://collabora.github.io/WhisperSpeech/C. Word error rate metrics.html + 2024-04-09T09:51:07.124Z + + + https://collabora.github.io/WhisperSpeech/A. 
Neural modules.html + 2024-04-09T09:51:07.352Z + + diff --git a/styles.css b/styles.css new file mode 100644 index 0000000..66ccc49 --- /dev/null +++ b/styles.css @@ -0,0 +1,37 @@ +.cell { + margin-bottom: 1rem; +} + +.cell > .sourceCode { + margin-bottom: 0; +} + +.cell-output > pre { + margin-bottom: 0; +} + +.cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre { + margin-left: 0.8rem; + margin-top: 0; + background: none; + border-left: 2px solid lightsalmon; + border-top-left-radius: 0; + border-top-right-radius: 0; +} + +.cell-output > .sourceCode { + border: none; +} + +.cell-output > .sourceCode { + background: none; + margin-top: 0; +} + +div.description { + padding-left: 2px; + padding-top: 5px; + font-style: italic; + font-size: 135%; + opacity: 70%; +} diff --git a/whisper-block.png b/whisper-block.png new file mode 100644 index 0000000..ee2c020 Binary files /dev/null and b/whisper-block.png differ