diff --git a/.circleci/config.yml b/.circleci/config.yml index b4289243e0..f08608285b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,6 +10,7 @@ jobs: - image: cimg/python:3.8.10 environment: DOAJENV: test + - image: cimg/redis:6.2.14 - image: elasticsearch:7.10.1 environment: - xpack.security.enabled: false diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 8ee9b3549c..a8027abeaa 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,12 +1,10 @@ -# Title <- provide a title for the PR - -*Please don't delete any sections when completing this PR template; instead enter **N/A** for checkboxes or sections which are not applicable, unless otherwise stated below* +* Issue: [enter link to issue here] -See # <- enter link to issue on main board +--- -Describe the scope/purpose of the PR here in as much detail as you like +# Title <- provide a title for the PR -## Categorisation +*briefly describe the PR here* This PR... - [ ] has scripts to run @@ -18,121 +16,52 @@ This PR... - [ ] affects the publisher area - [ ] affects the monitoring -## Basic PR Checklist - -Instructions for developers: -* For each checklist item, if it is N/A to your PR check the N/A box -* For each item that you have done and confirmed for yourself, check Developer box (including if you have checked the N/A box) - -Instructions for reviewers: -* For each checklist item that has been confirmed by the Developer, check the Reviewer box if you agree -* For multiple reviewers, feel free to add your own checkbox with your github username next to it if that helps with review tracking - -### Code Style - -- No deprecated methods are used - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- No magic strings/numbers - all strings are in `constants` or `messages` files - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- ES queries are wrapped in a Query object rather than inlined in the code - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Where possible our common library functions have been used (e.g. dates manipulated via `dates`) - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Cleaned up commented out code, etc - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Urls are constructed with `url_for` not hard-coded - - [ ] N/A - - [ ] Developer - - [ ] Reviewer -### Testing - -- Unit tests have been added/modified - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Functional tests have been added/modified - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Code has been run manually in development, and functional tests followed locally - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Have CSS/style changes been implemented? If they are of a global scope (e.g. on base HTML elements) have the downstream impacts of the change in other areas of the system been considered? - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -### Documentation - -- FeatureMap annotations have been added - - [ ] N/A - - [ ] Developer - - [ ] Reviewer - -- Documentation updates - if needed - have been identified and prepared for inclusion into main documentation (e.g. 
added and highlighted/commented as appropriate to this PR)
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-- Core model documentation has been added to if needed: https://docs.google.com/spreadsheets/d/1lun2S9vwGbyfy3WjIjgXBm05D-3wWDZ4bp8xiIYfImM/edit
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-- Events and consumers documentation has been added if needed: https://docs.google.com/spreadsheets/d/1oIeG5vg-blm2MZCE-7YhwulUlSz6TOUeY8jAftdP9JE/edit
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-- The docs for this branch have been generated and pushed to the doc site (see docs/README.md for details)
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-
-### Release Readiness
-
-- If needed, migration has been created and tested locally
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-- Release sheet has been created, and completed as far as is possible https://docs.google.com/spreadsheets/d/1Bqx23J1MwXzjrmAygbqlU3YHxN1Wf7zkkRv14eTVLZQ/edit
-  - [ ] N/A
-  - [ ] Developer
-  - [ ] Reviewer
-
-- There has been a recent merge up from `develop` (or other base branch). List the dates of the merges up from develop below
-  - [date of merge up]
-
+## Developer Checklist
+
+*Developers should review and confirm each of these items before requesting review*
+
+* [ ] Code meets acceptance criteria from issue
+* [ ] Unit tests are written and all pass
+* [ ] User Test Scripts (if required) are written and have been run through
+* [ ] Project's coding standards are met
+  - No deprecated methods are used
+  - No magic strings/numbers - all strings are in `constants` or `messages` files
+  - ES queries are wrapped in a Query object rather than inlined in the code
+  - Where possible our common library functions have been used (e.g. dates manipulated via `dates`)
+  - Cleaned up commented out code, etc
+  - URLs are constructed with `url_for` not hard-coded
+* [ ] Code documentation and related non-code documentation has all been updated
+  - Core model documentation has been added to if needed: https://docs.google.com/spreadsheets/d/1lun2S9vwGbyfy3WjIjgXBm05D-3wWDZ4bp8xiIYfImM/edit
+  - Events and consumers documentation has been added if needed: https://docs.google.com/spreadsheets/d/1oIeG5vg-blm2MZCE-7YhwulUlSz6TOUeY8jAftdP9JE/edit
+* [ ] Migration has been created and tested
+* [ ] There is a recent merge from `develop`
+
+## Reviewer Checklist
+
+*Reviewers should review and confirm each of these items before approval*
+*If there are multiple reviewers, this section should be duplicated for each reviewer*
+
+* [ ] Code meets acceptance criteria from issue
+* [ ] Unit tests are written and all pass
+* [ ] User Test Scripts (if required) are written and have been run through
+* [ ] Project's coding standards are met
+  - No deprecated methods are used
+  - No magic strings/numbers - all strings are in `constants` or `messages` files
+  - ES queries are wrapped in a Query object rather than inlined in the code
+  - Where possible our common library functions have been used (e.g. 
dates manipulated via `dates`)
+  - Cleaned up commented out code, etc
+  - URLs are constructed with `url_for` not hard-coded
+* [ ] Code documentation and related non-code documentation has all been updated
+  - Core model documentation has been added to if needed: https://docs.google.com/spreadsheets/d/1lun2S9vwGbyfy3WjIjgXBm05D-3wWDZ4bp8xiIYfImM/edit
+  - Events and consumers documentation has been added if needed: https://docs.google.com/spreadsheets/d/1oIeG5vg-blm2MZCE-7YhwulUlSz6TOUeY8jAftdP9JE/edit
+* [ ] Migration has been created and tested
+* [ ] There is a recent merge from `develop`
 
 ## Testing
 
-List the Functional Tests that must be run to confirm this feature
-
-1. ...
-2. ...
-
+*List user test scripts that need to be run*
+*List any non-unit test scripts that need to be run by reviewers*
 
 ## Deployment
 
@@ -161,5 +90,3 @@ What new infrastructure does this PR require (e.g. new services that need to run
 
 ### Continuous Integration
 
 What CI changes are required for this
-
-
diff --git a/.gitignore b/.gitignore
index bd2d5fbb88..6d28c2acfa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,6 +32,7 @@ app.cfg
 # directories of transient stuff that gets created locally
 media/*
 upload/*
+upload_async/*
 !upload/README.md
 failed_articles/*
 !failed_articles/README.md
@@ -131,3 +132,6 @@ doajtest/unit/resources/harvester_resp_temp.json
 
 # playground dir for try things out
 portality/_playground/*
+
+# ignore local gitignore
+.gitignore-local
diff --git a/.gitmodules b/.gitmodules
index d1b3c3667c..84dfae748f 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,3 @@
 [submodule "portality/static/vendor/edges"]
 	path = portality/static/vendor/edges
-	url = https://github.com/CottageLabs/edges.git
-[submodule "portality/static/vendor/swagger-ui"]
-	path = portality/static/vendor/swagger-ui
-	url = https://github.com/swagger-api/swagger-ui.git
+	url = https://github.com/CottageLabs/edges.git
\ No newline at end of file
diff --git a/cms/assets/img/sponsors/Degruyter.svg b/cms/assets/img/sponsors/Degruyter.svg
deleted file mode 100644
index 4fcff995f5..0000000000
--- a/cms/assets/img/sponsors/Degruyter.svg
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-
diff --git a/cms/assets/img/sponsors/Elife-logo-2020.png b/cms/assets/img/sponsors/Elife-logo-2020.png
new file mode 100644
index 0000000000..3bb9b2c4d7
Binary files /dev/null and b/cms/assets/img/sponsors/Elife-logo-2020.png differ
diff --git a/cms/assets/img/sponsors/Elsevier_logo_2019.svg b/cms/assets/img/sponsors/Elsevier_logo_2019.svg
new file mode 100644
index 0000000000..a754f6f903
--- /dev/null
+++ b/cms/assets/img/sponsors/Elsevier_logo_2019.svg
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/cms/assets/img/sponsors/LIB.png b/cms/assets/img/sponsors/LIB.png
new file mode 100644
index 0000000000..30652d63e6
Binary files /dev/null and b/cms/assets/img/sponsors/LIB.png differ
diff --git a/cms/assets/img/sponsors/Logo1.png b/cms/assets/img/sponsors/Logo1.png
new file mode 100644
index 0000000000..0aa4ceebaf
Binary files /dev/null and b/cms/assets/img/sponsors/Logo1.png differ
diff --git a/cms/assets/img/sponsors/Logo_of_Erudit.svg.png b/cms/assets/img/sponsors/Logo_of_Erudit.svg.png
new file mode 100644
index 0000000000..159d5dcf08
Binary files /dev/null and b/cms/assets/img/sponsors/Logo_of_Erudit.svg.png differ
diff --git a/cms/assets/img/sponsors/OurResearch.png b/cms/assets/img/sponsors/OurResearch.png
new file mode 100644
index 0000000000..09eb7fe3e3
Binary files /dev/null and b/cms/assets/img/sponsors/OurResearch.png differ
diff --git 
a/cms/assets/img/sponsors/Pensoft.png b/cms/assets/img/sponsors/Pensoft.png new file mode 100644 index 0000000000..b4d9f7d76b Binary files /dev/null and b/cms/assets/img/sponsors/Pensoft.png differ diff --git a/cms/assets/img/sponsors/Sirsi.png b/cms/assets/img/sponsors/Sirsi.png new file mode 100644 index 0000000000..fc7abbbf5a Binary files /dev/null and b/cms/assets/img/sponsors/Sirsi.png differ diff --git a/cms/assets/img/sponsors/TSPlogo.jpg b/cms/assets/img/sponsors/TSPlogo.jpg new file mode 100644 index 0000000000..276f0d53b4 Binary files /dev/null and b/cms/assets/img/sponsors/TSPlogo.jpg differ diff --git a/cms/assets/img/sponsors/cogitatio.png b/cms/assets/img/sponsors/cogitatio.png new file mode 100644 index 0000000000..3a6bdf9b12 Binary files /dev/null and b/cms/assets/img/sponsors/cogitatio.png differ diff --git a/cms/assets/img/sponsors/de_gruyter.svg b/cms/assets/img/sponsors/de_gruyter.svg new file mode 100644 index 0000000000..f2afa97e15 --- /dev/null +++ b/cms/assets/img/sponsors/de_gruyter.svg @@ -0,0 +1,42 @@ + + + + + + diff --git a/cms/assets/img/sponsors/delta.png b/cms/assets/img/sponsors/delta.png new file mode 100644 index 0000000000..ab2c3b0061 Binary files /dev/null and b/cms/assets/img/sponsors/delta.png differ diff --git a/cms/assets/img/sponsors/emerald.png b/cms/assets/img/sponsors/emerald.png new file mode 100644 index 0000000000..b93d70b8a5 Binary files /dev/null and b/cms/assets/img/sponsors/emerald.png differ diff --git a/cms/assets/img/sponsors/igf.png b/cms/assets/img/sponsors/igf.png new file mode 100644 index 0000000000..6a61f6e5c3 Binary files /dev/null and b/cms/assets/img/sponsors/igf.png differ diff --git a/cms/assets/img/sponsors/pensoftlogo.jpg b/cms/assets/img/sponsors/pensoftlogo.jpg new file mode 100644 index 0000000000..4247ee6279 Binary files /dev/null and b/cms/assets/img/sponsors/pensoftlogo.jpg differ diff --git a/cms/assets/img/sponsors/scup-logo.png b/cms/assets/img/sponsors/scup-logo.png new file mode 100644 index 0000000000..dae5c6dd20 Binary files /dev/null and b/cms/assets/img/sponsors/scup-logo.png differ diff --git a/cms/assets/img/sponsors/uj.png b/cms/assets/img/sponsors/uj.png new file mode 100644 index 0000000000..9ee31612c7 Binary files /dev/null and b/cms/assets/img/sponsors/uj.png differ diff --git a/cms/assets/img/volunteers/Ben Catt.jpg b/cms/assets/img/volunteers/Ben Catt.jpg new file mode 100644 index 0000000000..1f7ba89b53 Binary files /dev/null and b/cms/assets/img/volunteers/Ben Catt.jpg differ diff --git a/cms/assets/img/volunteers/Kamila_Kokot.jpg b/cms/assets/img/volunteers/Kamila_Kokot.jpg new file mode 100644 index 0000000000..e34526af2f Binary files /dev/null and b/cms/assets/img/volunteers/Kamila_Kokot.jpg differ diff --git a/cms/assets/img/volunteers/Mirecka.jpeg b/cms/assets/img/volunteers/Mirecka.jpeg deleted file mode 100644 index 338eda062e..0000000000 Binary files a/cms/assets/img/volunteers/Mirecka.jpeg and /dev/null differ diff --git a/cms/assets/img/volunteers/Patricia.jpg b/cms/assets/img/volunteers/Patricia.jpg new file mode 100644 index 0000000000..b6ebd1dc53 Binary files /dev/null and b/cms/assets/img/volunteers/Patricia.jpg differ diff --git a/cms/assets/img/volunteers/Popova-1.jpeg b/cms/assets/img/volunteers/Popova-1.jpeg new file mode 120000 index 0000000000..238ea46773 --- /dev/null +++ b/cms/assets/img/volunteers/Popova-1.jpeg @@ -0,0 +1 @@ +../ambassadors/Popova-1.jpeg \ No newline at end of file diff --git a/cms/assets/img/volunteers/ina-smith.png b/cms/assets/img/volunteers/ina-smith.png 
new file mode 120000
index 0000000000..878b93f265
--- /dev/null
+++ b/cms/assets/img/volunteers/ina-smith.png
@@ -0,0 +1 @@
+../ambassadors/ina-smith.png
\ No newline at end of file
diff --git a/cms/assets/img/volunteers/max.png b/cms/assets/img/volunteers/max.png
new file mode 120000
index 0000000000..7cad0ebe9d
--- /dev/null
+++ b/cms/assets/img/volunteers/max.png
@@ -0,0 +1 @@
+../ambassadors/max.png
\ No newline at end of file
diff --git a/cms/data/ambassadors.yml b/cms/data/ambassadors.yml
index db06e7db21..a14afcc2b4 100644
--- a/cms/data/ambassadors.yml
+++ b/cms/data/ambassadors.yml
@@ -72,7 +72,7 @@
 
 - name: Maxim Mitrofanov
   region: Russian Federation
-  bio: "Maxim was born in Moscow, graduated from the University of Foreign Relations and then worked for the Ministry of Foreign Affairs of Russia for nine years. After leaving the Ministry in 2007 he worked for the largest Russian exhibition company, Expocentre, and then joined NEICON in 2014. NEICON is currently the only organisation in Russia supporting DOAJ and is the driving force to share open access ideas among the Russian journal and scholar community."
+  bio: "Maxim was born in Moscow, graduated from the University of Foreign Relations and then worked for the Ministry of Foreign Affairs of Russia for nine years. After leaving the Ministry in 2007 he worked for the largest Russian exhibition company, Expocentre, and then joined NEICON in 2014. NEICON is the driving force to share open access ideas among the Russian journal and scholar community."
   photo: "max.png"
   coi:
     2022: https://drive.google.com/file/d/1Dzb8QzS5V0KzjNnybkWFQpjBwk1Jy8il/view?usp=sharing
diff --git a/cms/data/nav.yml b/cms/data/nav.yml
index 1330fd564b..62cbdece73 100644
--- a/cms/data/nav.yml
+++ b/cms/data/nav.yml
@@ -46,6 +46,8 @@ entries:
     route: doaj.ambassadors # ~~->Ambassadors:WebRoute~~
   - label: Advisory Board & Council
     route: doaj.abc # ~~->AdvisoryBoardCouncil:WebRoute~~
+  - label: Editorial Policy Advisory Group
+    route: doaj.epag # ~~->EditorialPolicyAdvisoryGroup:WebRoute~~
   - label: Volunteers
     route: doaj.volunteers # ~~->Volunteers:WebRoute~~
   - label: News
diff --git a/cms/data/notifications.yml b/cms/data/notifications.yml
index 71919c5ad4..cde76f922b 100644
--- a/cms/data/notifications.yml
+++ b/cms/data/notifications.yml
@@ -142,7 +142,23 @@ update_request:publisher:rejected:notify:
   short:
     Your update request ({issns}) was rejected
 
-journal:assed:discontinuing_soon:notify:
+update_request:publisher:submitted:notify:
+  long: |
+    "{application_title}"
+
+    Thank you for submitting an update to your journal on {date_applied}. This has been safely received. A Managing Editor will start to review it soon. In the meantime, your original journal record is still visible in DOAJ and you may continue to upload article metadata to us.
+
+    Please save this email as a record of when you submitted your update. Before you contact us requesting a status update, please check your Spam folder to ensure that a member of our editorial team has not already contacted you.
+
+    Yours faithfully,
+
+    The DOAJ Editorial Team
+
+    **This is an automated message.**
+  short:
+    Your update request ({issns}) has been submitted
+
+journal:maned:discontinuing_soon:notify:
   long: |
     Journal "{title}" (id: {id}) will discontinue in {days} days. 
short: diff --git a/cms/data/publisher-supporters.yml b/cms/data/publisher-supporters.yml index 23b9e70646..2011fa06d1 100644 --- a/cms/data/publisher-supporters.yml +++ b/cms/data/publisher-supporters.yml @@ -1,30 +1 @@ # ~~PublisherSupporters:Data~~ -- name: Cogitatio Press - url: https://www.cogitatiopress.com/ - -- name: Faculty of Communication, Universitas Tarumanagara - url: https://fikom.untar.ac.id/ - -- name: Gruppo Italiano Frattura - url: http://www.gruppofrattura.it/sito/en/ - -- name: INCAS - National Institute for Aerospace Research “Elie Carafoli”, INCAS Bucuresti - url: https://www.gruppofrattura.it/sito/en/ - -- name: Italian Society of Victimology - url: https://www.vittimologia.it/rivista - -- name: Open Academia - url: https://www.openacademia.net/ - -- name: Scandinavian University Press (Universitetsforlaget AS) - url: https://www.universitetsforlaget.no/ - -- name: Scientia Agropecuaria - url: https://revistas.unitru.edu.pe/index.php/scientiaagrop - -- name: Springer Nature - url: https://www.springernature.com/gp - -- name: Tsinghua University Press - url: https://www.tsinghua.edu.cn/en/ diff --git a/cms/data/sponsors.yml b/cms/data/sponsors.yml index 38008a4621..a0128f20d7 100644 --- a/cms/data/sponsors.yml +++ b/cms/data/sponsors.yml @@ -1,98 +1,98 @@ # List of sponsors separated by tiers (premier, sustaining, basic) # ~~Sponsors:Data~~ -- name: Royal Society of Chemistry - url: https://www.rsc.org/ - logo: rsc.png +- name: American Chemical Society + url: https://pubs.acs.org/ + logo: acs.jpg -- name: Georg Thieme Verlag KG - url: https://www.thieme.com/ - logo: thieme.svg +- name: American Psychological Association + url: https://www.apa.org/pubs + logo: apa.png - name: AOSIS url: https://aosis.co.za/ logo: aosis.png +- name: Association of Learned and Professional Society Publishers + url: https://www.alpsp.org/ + logo: ALPSP.png + +- name: Beijing Lanbotu Technology Co., Ltd + url: https://xueshu.libtools.com.cn/?tenant_id=1 + logo: LIB.png + +- name: Cambridge University Press + url: https://www.cambridge.org/ + logo: cambridge.svg + - name: Cappelen Damm Akademisk url: https://www.cappelendammundervisning.no/ logo: cda.jpg +- name: Cogitatio Press + url: https://www.cogitatiopress.com/ + logo: cogitatio.png + - name: Copernicus Publications url: https://publications.copernicus.org logo: copernicus.svg -- name: EBSCO - url: https://www.ebsco.com/ - logo: ebsco.svg - -- name: Springer Nature Ltd - url: https://www.springernature.com/gp - logo: SN_stack_logo.png - -- name: Frontiers - url: https://www.frontiersin.org/ - logo: frontiers.svg - -- name: Knowledge E - url: https://knowledgee.com/ - logo: knowledgee.png - -- name: MDPI - url: https://www.mdpi.com/ - logo: mdpi.svg - -- name: OA.Works - url: https://oa.works/ - logo: oaworks.png - -- name: SAGE Publishing - url: https://www.sagepublications.com/ - logo: sage.svg - -- name: Taylor & Francis Group - url: https://www.taylorandfrancisgroup.com/ - logo: tf.svg - -- name: John Wiley and Sons LTD - url: https://www.wiley.com/en-us - logo: Wiley_Wordmark_black.png - -- name: American Chemical Society - url: https://pubs.acs.org/ - logo: acs.jpg - -- name: American Psychological Association - url: https://www.apa.org/pubs - logo: apa.png +- name: De Gruyter + url: https://www.degruyter.com/ + logo: de_gruyter.svg -- name: Cambridge University Press - url: https://www.cambridge.org/ - logo: cambridge.svg +- name: Delta Think Inc + url: https://deltathink.com/ + logo: delta.png - name: Digital Science url: 
https://www.digital-science.com/ logo: ds.svg -- name: De Gruyter - url: https://www.degruyter.com/ - logo: Degruyter.svg +- name: EBSCO + url: https://www.ebsco.com/ + logo: ebsco.svg - name: eLife Sciences Publications url: https://elifesciences.org/ - logo: elife.svg + logo: Elife-logo-2020.png - name: Elsevier url: https://www.elsevier.com/ - logo: elsevier.svg + logo: Elsevier_logo_2019.svg - name: Emerald Publishing url: https://emeraldpublishing.com/ logo: emerald.svg +- name: Le Consortium Érudit + url: https://www.erudit.org/en/ + logo: Logo_of_Erudit.svg.png + +- name: Frontiers + url: https://www.frontiersin.org/ + logo: frontiers.svg + +- name: Gruppo Italiano Frattura + url: https://www.gruppofrattura.eu/ + logo: igf.png + +- name: Iași University of Life Sciences + url: https://iuls.ro/en/ + logo: Lasi.png + - name: IEEE url: https://www.ieee.org/ logo: ieee.png +- name: IET + url: https://www.theiet.org/ + logo: iet.svg + +- name: INCAS - National Institute for Aerospace Research “Elie Carafoli” + url: https://www.incas.ro/ + logo: Logo1.png + - name: Institute of Physics url: https://www.iop.org/ logo: iop.jpg @@ -101,55 +101,83 @@ url: https://iucn.org/ logo: IUCN.svg -- name: JMIR Publications - url: https://jmirpublications.com/ - logo: jmir.svg - - name: Karger url: https://www.karger.com/ logo: karger.png -- name: KeAI Communications - url: https://www.keaipublishing.com/ +- name: KEAI - Guest Marketing Service Ltd + url: https://www.keaipublishing.com/en/ logo: keai.svg +- name: MDPI + url: https://www.mdpi.com/ + logo: mdpi.svg + - name: NERAC Inc. url: https://www.nerac.com/ logo: nerac.jpg -- name: OASPA - url: https://oaspa.org/ +- name: Open Access Scholarly Publishers Association + url: https://www.oaspa.org/ logo: oaspa.png +- name: OA.Works + url: https://oa.works/ + logo: oaworks.png + - name: OCLC url: https://www.oclc.org/ logo: oclc.svg +- name: Our Research + url: https://ourresearch.org/ + logo: OurResearch.png + +- name: Pensoft Publishers + url: https://pensoft.net/media + logo: pensoftlogo.jpg + - name: PLOS url: https://plos.org/ logo: plos.svg + +- name: SAGE Publishing + url: https://www.sagepublications.com/ + logo: sage.svg +- name: Scandinavian University Press + url: https://www.universitetsforlaget.no/en/ + logo: scup-logo.png + - name: SciFree url: https://scifree.se/ logo: scifree.svg -- name: The IET - url: https://www.theiet.org/ - logo: iet.svg +- name: SirsiDynix + url: https://www.sirsidynix.com/ + logo: Sirsi.png + +- name: Springer Nature Ltd + url: https://www.springernature.com/gp + logo: SN_stack_logo.png + +- name: Taylor & Francis Group + url: https://www.taylorandfrancisgroup.com/ + logo: tf.svg + +- name: Tsinghua University Press + url: https://www.tsinghua.edu.cn/en/ + logo: TSPlogo.jpg - name: Ubiquity Press url: https://www.ubiquitypress.com/ logo: ubiquity.svg -- name: Iași University of Life Sciences - url: https://iuls.ro/en/ - logo: Lasi.png +- name: University of Johannesburg + url: https://www.uj.ac.za/ + logo: uj.png -- name: Beijing Lanbotu Technology Co - url: https://xueshu.libtools.com.cn/?tenant_id=1 - logo: Beijing.jpg - -- name: Association of Learned and Professional Society Publishers - url: https://www.alpsp.org/ - logo: ALPSP.png +- name: John Wiley and Sons LTD + url: https://www.wiley.com/en-us + logo: Wiley_Wordmark_black.png diff --git a/cms/data/team.yml b/cms/data/team.yml index 5f7758cbc5..009803bf3d 100644 --- a/cms/data/team.yml +++ b/cms/data/team.yml @@ -116,7 +116,7 @@ - name: Lars Bjørnshauge 
role: Advisor and Founder photo: lars.jpg - bio: "Lars worked at Danish university libraries for two decades and was Director of Libraries at Lund University, Sweden from 2001 to 2011. He founded the DOAJ in 2003, and was Managing Director from 2013-2021. He has vast experience in change management, re-engineering academic libraries, and developing information services for research & higher education. For more than two decades, Lars has strongly advocated open access and provided services to the open access movement. He is co-founder of OpenDOAR, the Directory of Open Access Books and Think. Check. Submit. Lars lives outside Copenhagen and is married with four children and four grandchildren. He enjoys vegetable gardening, growing cacti and succulents, and playing internet chess." + bio: "Lars worked at Danish university libraries for two decades and was Director of Libraries at Lund University, Sweden from 2001 to 2011. He founded the DOAJ in 2003, and was Managing Director from 2013-2021. He has vast experience in change management, re-engineering academic libraries, and developing information services for research & higher education. For more than two decades, Lars has strongly advocated open access and provided services to the open access movement. He is co-founder of OpenDOAR, the Directory of Open Access Books and Think. Check. Submit. Lars lives outside Copenhagen and is married with four children and four grandchildren. He enjoys vegetable gardening, growing cacti and succulents, and playing internet chess." coi: 2018: https://drive.google.com/file/d/1mm1a8nbY5MQX9loqIs2ZQuVN-73RfPuN/view?usp=sharing 2021: https://drive.google.com/file/d/1bNj5sqUsu4sRLmm_YOuh3JCSMERzQ1Ro/view?usp=sharing @@ -175,13 +175,6 @@ 2022: https://drive.google.com/file/d/1soZtiW6gyVJPl7P_J60j2TL2Fqzl0QAs/view?usp=sharing 2024: https://drive.google.com/file/d/1jCz7G1imwy1Z2LB0h05QCxwDYRuHrPzt/view?usp=drive_link -- name: Sophie - role: UX/UI Product Designer - photo: - bio: "Sophie specialises in user experience design and accessibility for open source projects in open access and scholarly communications. She has a Masters in Library and Information science from the University of Montreal and McGill University and did her undergraduate studies in biology and French literature at the University of Ottawa. She also leads design and front-end development at OA.Works, and has worked for the Public Knowledge Project (OJS), SPARC, and Érudit, as well as directly for a number of scholars doing UI design and data visualisation." - coi: - 2022: https://drive.google.com/file/d/19J5ELHNaV_pby7ZpQMii8_Ts4wiERu8K/view?usp=sharing - - name: Zhanna Protasevych role: Finance Administrator photo: zhanna.jpg diff --git a/cms/data/volunteers.yml b/cms/data/volunteers.yml index 7fbe11a1c8..71589566e5 100644 --- a/cms/data/volunteers.yml +++ b/cms/data/volunteers.yml @@ -42,7 +42,7 @@ ed: language: Indonesian, English photo: "Ratodi.jpg" -- name: Natalia Pamuła-Cieślak +- name: Natalia Pamuła area: Humanities, Social Sciences year_since: city: Toruń @@ -95,8 +95,8 @@ ed: ## All associate editors ass_ed: -- name: Adhi Narayanan - area: Scientometric and Bibliometric Studies, Biotechnology +- name: K. 
Adhinarayanan + area: Scientometric and Bibliometric Studies, Biotechnology, Library and Information Science year_since: city: Tiruchirappalli country: India @@ -112,7 +112,7 @@ ass_ed: photo: "alainchaple.png" - name: Aleksandra Zawadzka - area: Librarianship + area: Library Science year_since: city: Wroclaw country: Poland @@ -138,7 +138,7 @@ ass_ed: area: Library and Information Science, Scientometrics year_since: city: Tehran - country: I.R.Iran + country: Iran language: Persian (Farsi), English photo: "Amir Reza Asnafi.jpg" @@ -151,7 +151,7 @@ ass_ed: photo: "Andista.jpg" - name: Andrea Imre - area: Library Science, Music, General + area: Library Science, Music year_since: city: Carbondale, IL country: USA @@ -164,22 +164,7 @@ ass_ed: country: Italy language: Italian, English photo: "Marchitelli.jpg" - -- name: Andronic Octavian - area: Medicine - year_since: - city: Bucharest - country: Romania - language: Romanian, English - photo: "Octav.jpg" -- name: Anna Sidorko - area: Library Science - year_since: - city: Warsaw - country: Poland - language: Polish, English - - name: Annarita Barbaro area: Humanities year_since: @@ -203,12 +188,13 @@ ass_ed: photo: "azhar.jpg" - name: Ben Catt - area: Librarianship, Research Support + area: Library Science, Research Support year_since: city: York country: United Kingdom language: English - + photo: "Ben Catt.jpg" + - name: Busro Busro area: Humanities year_since: @@ -233,15 +219,15 @@ ass_ed: photo: "Longo.jpg" - name: Carla Marques - area: Information and library sciences + area: Information and Library Sciences year_since: city: Braga country: Portugal - language: Information and library sciences + language: Portuguese, English photo: "Carla.jpg" - name: Cezary Borkowicz - area: Librarianship + area: Library Science year_since: city: Biała Podlaska country: Poland @@ -309,18 +295,11 @@ ass_ed: year_since: city: Balți country: Republic of Moldova - language: Romanian, Russian, Ukrainian, German + language: Romanian, Russian, Ukrainian, English photo: "Dumitru.jpg" -- name: Dr. 
Eko Supraptono
-  area: Technology
-  year_since:
-  city: Semarang
-  country: Indonesia
-  language: Indonesian, English
-
 - name: Dwi Fajar Saputra
-  area: Librarianship
+  area: Library Science
   year_since:
   city: Jakarta
   country: Indonesia
@@ -341,6 +320,13 @@
   country: Indonesia
   language: Indonesian, English
 
+- name: Eko Supraptono
+  area: Technology
+  year_since:
+  city: Semarang
+  country: Indonesia
+  language: Indonesian, English
+
 - name: Emrah Kaya
   area: Social Sciences
   year_since:
@@ -349,6 +335,14 @@
   language: English
   photo: "Emrah Kaya.jpg"
 
+- name: Flavius-Cristian Marcau
+  area: Social Sciences
+  year_since:
+  city: Târgu Jiu
+  country: Romania
+  language: Romanian, English
+  photo: "Flavius.jpg"
+
 - name: Francesco Cavinato
   area: Humanities, Social Sciences
   year_since:
@@ -357,7 +351,7 @@
   language: Italian, English
 
 - name: Hanae Lrhoul
-  area: Librarianship
+  area: Library Science
   year_since:
   city: Rabat
   country: Morocco
@@ -371,12 +365,13 @@
   language: Indonesian, English
   photo: "handoko.jpg"
 
-- name: Hyun Jung Yi
-  area:
+- name: Ina Smith
+  area: Scholarly Publishing, Instructional Design
   year_since:
-  city:
-  country: Republic of Korea
-  language: Korean, English
+  city: Pretoria
+  country: South Africa
+  language: English, Afrikaans
+  photo: "ina-smith.png"
 
 - name: Iryna Kuchma
   area: Humanities, Social Sciences
   year_since:
@@ -400,13 +395,6 @@
   country: Lebanon
   language: Arabic, French, English
 
-- name: Janne Pölönen
-  area:
-  year_since:
-  city:
-  country:
-  language: English
-
 - name: Jinjin Liu
   area: Social Sciences, Medicine
   year_since:
@@ -421,13 +409,6 @@
   country: Portugal
   language: Portuguese, English
 
-- name: JooYeun Son
-  area:
-  year_since:
-  city:
-  country:
-  language: Korean, English
-
 - name: Juliana Soares Lima
   area: Librarianship and information science
   year_since:
@@ -445,7 +426,7 @@
   photo: "Julio.jpg"
 
 - name: Juyeon Park
-  area: Librarianship
+  area: Library Science
   year_since:
   city: Seoul
   country: Korea
@@ -468,6 +449,14 @@
   language: Turkish, English
   photo: "kamil.jpg"
 
+- name: Kamila Kokot-Kanikuła
+  area: Library Science
+  year_since:
+  city: Gdańsk
+  country: Poland
+  language: Polish, English
+  photo: "Kamila_Kokot.jpg"
+
 - name: Karima Akool Al-Sahili
   area: Medicine
   year_since:
@@ -485,7 +474,7 @@
   photo: "Khoirul.jpg"
 
 - name: Kristen Totleben
-  area: Librarianship, Humanities, Social Sciences
+  area: Library Science, Humanities, Social Sciences
   year_since:
   city: Rochester, New York
   country: USA
@@ -499,13 +488,6 @@
   country: Cuba
   language: Spanish and English
   photo: "Lee.jpg"
-
-- name: Lena Lönngren
-  area:
-  year_since:
-  city:
-  country: Finland
-  language: Finnish, English
 
 - name: Leila Yang
   area: Scholarly Publishing
@@ -545,14 +527,6 @@
   country: USA
   language: English
 
-- name: Marcau Flavius-Cristian
-  area: Social Sciences
-  year_since:
-  city: Târgu jiu
-  country: Romania
-  language: Romanian, English
-  photo: "Flavius.jpg"
-
 - name: Marco Tullney
   area: Social Sciences, Technology
   year_since:
@@ -560,13 +534,6 @@
   country: Germany
   language: German, English
 
-- name: Maria Chiara Pievatolo
-  area: Humanities, Social Sciences
-  year_since:
-  city: Pisa
-  country: Italy
-  language: Italian, English, German
-
 - name: Maria Manuel Borges
   area: Social Sciences
   year_since:
@@ -580,13 +547,6 @@
   city: Montréal
   country: Canada
   language: French, English
-
-- name: Mario Alejandro Marín
-  area:
-  year_since:
-  city:
-  country:
-  language: Spanish, English
-
 - 
name: Martyna Mirecka
   area: History, Political Science
@@ -595,7 +555,15 @@
   year_since:
   city:
   country: Poland
   language: Polish, English
   photo: "Martyna.JPG"
-
+
+- name: Maxim Mitrofanov
+  area: International Relations, Political Science
+  year_since:
+  city: Moscow
+  country: Russian Federation
+  language: Russian, English
+  photo: "max.png"
+
 - name: Melih Sever
   area: Social Sciences
   year_since:
@@ -603,6 +571,20 @@
   country: Türkiye
   language: English
 
+- name: Michaela Voigt
+  area: Library and Information Science, Scandinavian Studies
+  year_since:
+  city: Berlin
+  country: Germany
+  language: German, English, Swedish
+
+- name: Mihaela Teodor
+  area: Humanities, Social Sciences
+  year_since:
+  city: Bucharest
+  country: Romania
+  language: Romanian, English, French
+
 - name: Milagro Castro
   area: Social Sciences
   year_since:
@@ -619,22 +601,23 @@
   language: Indonesian, English
   photo: "MuhamadTaufik.jpg"
 
+- name: Natalia Popova
+  area: Sociology
+  year_since:
+  city: Ekaterinburg
+  country: Russian Federation
+  language: Russian, English
+  photo: "Popova-1.jpeg"
+
 - name: Nataliia Kaliuzhna
   area: Library and Information Science
   year_since:
   city: Kyiv
   country: Ukraine
-  language: Ukranian, Russian, English, Polish
+  language: Ukrainian, Russian, English, Polish
   featured: true
   photo: "Nataliia.jpg"
-
-- name: Nathalia Avila
-  area:
-  year_since:
-  city:
-  country:
-  language: Spanish, English
-
+
 - name: Natia Gabedava
   area: Humanities, Education
   year_since:
@@ -659,6 +642,14 @@
   language: Arabic, French, English, Kabyle
   photo: "nourelhouda.png"
 
+- name: Octavian Andronic
+  area: Medicine
+  year_since:
+  city: Bucharest
+  country: Romania
+  language: Romanian, English
+  photo: "Octav.jpg"
+
 - name: Oleksii Vasyliev
   area: Science, Technology
   year_since:
@@ -674,13 +665,6 @@
   language: Ukrainian, Russian, English, Polish
   photo: "OlenaZimba.jpg"
 
-- name: Olga Kirillova
-  area:
-  year_since:
-  city:
-  country:
-  language:
-
 - name: Oskia Agirre
   area: Library and Information Science
   year_since:
@@ -697,11 +681,19 @@
   language: Portuguese, English
   photo: "paulacarina.JPG"
 
+- name: Patrícia Aragão
+  area: Academic Publications, Languages and Literature
+  year_since:
+  city: Rio Grande do Sul
+  country: Brazil
+  language: Portuguese, Spanish, Italian, English
+  photo: "Patricia.jpg"
+
 - name: Princess Uju E. Nwafor-Orizu
   area: Medicine
   year_since:
   city:
-  country:
+  country: Nigeria
   language: English
   photo: "princessnwafor.jpg"
 
@@ -713,13 +705,6 @@
   language: English
   photo: "Priya.jpg"
 
-- name: Prof. Dr. 
Sudarmin
-  area: Science
-  year_since:
-  city:
-  country: Indonesia
-  language: Indonesian, English
-
 - name: Remedios Melero (Reme)
   area: Social Sciences
   year_since:
@@ -750,18 +735,11 @@
   photo: "amini.jpg"
 
 - name: Sara Jalalzadeh Asrjadidi
-  area: Librarianship
+  area: Library Science
   year_since:
   city: Tabriz
   country: Iran
   language: Persian, Azari, Turkish, English
-
-- name: Sara Ricetto
-  area: Open Access, Open Science
-  year_since:
-  city: Milan
-  country: Italy
-  language: Italian, English, German
 
 - name: Shiying Li
   area: Forensic Science
@@ -770,13 +748,6 @@
   country: China
   language: Chinese
 
-- name: Soon Kim
-  area:
-  year_since:
-  city:
-  country: Republic of Korea
-  language: Korean, English
-
 - name: Stewart C Baker
   area: Humanities, Library Science
   year_since:
@@ -792,6 +763,13 @@
   language: French
   photo: "sogoba1.jpg"
 
+- name: Sudarmin
+  area: Science
+  year_since:
+  city:
+  country: Indonesia
+  language: Indonesian, English
+
 - name: Susana Costa
   area: Library and Information Sciences
   year_since:
@@ -807,13 +785,6 @@
   country: Indonesia
   language: Indonesian, English
 
-- name: Teodor Mihaela
-  area: Humanities, Social Sciences
-  year_since:
-  city: Bucharest
-  country: Romania
-  language: Romanian, English, French
-
 - name: Utiya Hikmah
   area: Material Physics
   year_since:
@@ -823,7 +794,7 @@
   photo: "utiyah.jpg"
 
 - name: Vasanth N
-  area: Librarianship
+  area: Library Science
   year_since:
   city: Mysore
   country: India
@@ -835,7 +806,7 @@
   year_since:
   city: Plano, Texas
   country: USA
-  language: Spanish, English and Portuguese
+  language: Spanish, English, Portuguese
   photo: "Wileidys.jpg"
 
 - name: Yalçın Tükel
diff --git a/cms/pages/about/editorial-policy-advisory-group.md b/cms/pages/about/editorial-policy-advisory-group.md
new file mode 100644
index 0000000000..edf22f70c2
--- /dev/null
+++ b/cms/pages/about/editorial-policy-advisory-group.md
@@ -0,0 +1,55 @@
+---
+layout: sidenav
+title: Editorial Policy Advisory Group
+section: About
+toc: true
+sticky_sidenav: true
+featuremap: ~~About:Fragment~~
+
+---
+
+Our Editorial Policy Advisory Group assists the editorial team in matters of publishing policies and practices. Members of the group are chosen from trusted organisations working in scholarly publishing.
+
+The group helps us ensure that DOAJ criteria are effective for the widest range of publishers across the globe.
+
+## Scope
+
+To contribute expertise and advice on:
+
+- Developments in scholarly publishing
+- Publishing ethics and questionable practices
+- Publishing practices in different subject or geographical areas
+- New criteria for inclusion in DOAJ or changes to existing criteria
+- Other issues of editorial policy or practice
+
+## Members
+
+The Editorial Policy Advisory Group is:
+
+### Budianto Hamuddin
+
+Budianto is a senior lecturer in the Faculty of Education and Vocational Studies at Universitas Lancang Kuning, Indonesia, where he also leads the Research Department at LPPM Unilak. An active member of RJI, he currently serves as the coordinator for Tim Panji RJI to support and develop scientific journals in Indonesia. Budianto earned his Master's degree from Universiti Malaya in Malaysia and his Doctorate from Universitas Hasanuddin in Indonesia, specialising in Applied Linguistics with a focus on cyberbullying on online platforms. He serves on the editorial boards of several SINTA-accredited journals in Indonesia and is a reviewer for numerous international scientific journals. 
+
+### José Florencio Fabella Lapeña, Jr.
+
+Joey is a retired Professor of Otolaryngology at U.P. College of Medicine, former Vice Chancellor at the University of the Philippines Manila and Attending Otolaryngologist at Philippine General Hospital. He has special interests in pediatric cleft and aero-digestive surgery, medical education, mentoring, writing and peer review. He is Secretary of the World Association of Medical Editors, Past President of the Asia-Pacific Association of Medical Journal Editors, and Charter President of the Philippine Association of Medical Journal Editors. He is Editor-in-Chief of the *Philippine Journal of Otolaryngology Head and Neck Surgery*, and chairs the Philippine National Journal Selection Committee for the Western Pacific Region Index Medicus of the World Health Organization.
+
+### Jose Octavio Alonso Gamboa
+
+Octavio is a founding member of Latindex (www.latindex.org), an information system comprising 24 countries, of which he has been the general coordinator since 2005. He is a tenured academic at the General Directorate of Libraries of the National Autonomous University of Mexico (UNAM) and holds a Master's degree in Library and Information Studies. His experience includes topics such as the quality and visibility of scientific journals, as well as the transformation of academic journals to the digital culture. His professional career focuses on regional science communication and open access, reflecting his interest in the promotion of open science and the democratisation of knowledge.
+
+### Kazuki Ide
+
+Kazuki is a Specially-Appointed Associate Professor at Osaka University. His broad research interests include scholarly communication/publication ethics, public health/health informatics, and ELSI/RRI. He has performed more than 500 peer reviews and received the Top Peer Reviewer Award in 2019 for Cross-Field (Top 1% in the World, Web of Science). He also led the translation of the following educational materials into Japanese: Combatting Predatory Academic Journals and Conferences (IAP, 2023) and Think.Check.Submit. (2024).
+
+### Kylie van Zyl
+
+Kylie is part of the small, dedicated team at African Journals Online, a South Africa-based NPO dedicated to quality African-published scholarly journals, where she handles JPPS assessments of new applicant journals. She holds an MA and PhD in History from Rhodes University, specialising in the social history of health in South African contexts.
+
+### Matt Hodgkinson
+
+Matt is a publishing and research integrity consultant based in Cambridge, UK. He volunteers as the co-chair of the membership subcommittee of the Committee on Publication Ethics (COPE) and Treasurer of the European Association of Science Editors (EASE). He has worked at three Open Access journal publishers: as a professional scientific editor at both BioMed Central (BMC) and the Public Library of Science (PLOS), then as Head of Research Integrity and Head of Editorial Policy and Ethics at Hindawi. He was also a Research Integrity Manager at the UK Research Integrity Office (UKRIO).
+
+### Paulin Ribbe
+
+Paulin holds a master's degree in digital publishing from ENSSIB (French National Library and Information Science School, Lyon, France). He has worked as a project manager for European projects in various research infrastructures in social sciences: Huma-Num, OPERAS (OpenEdition). 
He currently works at Sciences Po Lyon as an Open Science Officer, and as a project manager for Mir@bel, a website and community aiming to facilitate access to online journals. He participates in various projects in which major French publishing platforms and agencies work together to increase the visibility of French scientific journals.
diff --git a/cms/pages/about/volunteers.md b/cms/pages/about/volunteers.md
index 089ee1b8c1..b2dd6767c8 100644
--- a/cms/pages/about/volunteers.md
+++ b/cms/pages/about/volunteers.md
@@ -11,8 +11,8 @@ featuremap: ~~Volunteers:Fragment->VolunteersData:Template~~
 
 DOAJ volunteers come from all over the world, from a variety of backgrounds, and speak many languages.
 
-DOAJ would not be able to do the work that we do without our volunteers. In 2021, our volunteers provided 2500 hours of editorial effort! We are grateful for the work they do, helping us review applications.
+DOAJ would not be able to do the work that we do without our volunteers. In 2023, our volunteers provided 2500 hours of editorial effort! We are grateful for the work they do, helping us review applications.
 
-In recognition of their support for DOAJ and the open access community, we have published the names of our team of volunteers below.
+In recognition of their support for DOAJ and the open access community, we have published the names of our volunteers below.
 
 Occasionally, we need more volunteers, often when we need help with specific languages. Follow [our blog](https://blog.doaj.org/) or our [Twitter feed](http://twitter.com/doajplus) to be alerted when the next call for volunteers is published.
diff --git a/cms/pages/apply/guide.md b/cms/pages/apply/guide.md
index 854c2f5bf7..dafe5a95a8 100644
--- a/cms/pages/apply/guide.md
+++ b/cms/pages/apply/guide.md
@@ -93,7 +93,8 @@ The following information must be available online and easily accessible from th
   - Use of a plagiarism checking service is highly recommended but not required for inclusion in DOAJ.
   - Endogeny should be minimised.
   - The proportion of published research papers where at least one of the authors is an editor, editorial board member, or reviewer must not exceed 25% in either of the latest two issues.
-
+    - Where content is not divided into issues, for example in a continuous publication model, endogeny must not exceed 25% in the last calendar year (minimum 5 articles per year).
+
 ---
 
 ### Special issues
 
@@ -176,9 +177,15 @@ We cannot provide a status update for applications which are less than three mon
 
 ## If your application is rejected
 
-You will receive an email giving the reasons for rejection of the application. Unless otherwise advised by DOAJ staff, you may not submit another application for the same journal until six months after the date of the notification of rejection.
+You will receive an email giving the reasons for rejection of the application. Do not submit another application for the same journal until six months after the notification of rejection.
+
+A new application may only be submitted within the six-month period if the rejection was for one of these reasons and it has since been resolved:
-If your application was rejected due to an unconfirmed ISSN, we may be able to reopen the application if the ISSN is confirmed within three months of the DOAJ rejection. Email the [DOAJ Helpdesk](mailto:helpdesk@doaj.org).
+- Unconfirmed ISSN
+- Website unavailable
+- Insufficient content published
+
+Where a journal has been rejected for other reasons, a new application may be submitted after six months. 
You are responsible for providing accurate information when you submit an application. Applications that contain information that is inaccurate or wrong or that have answers missing are automatically rejected. @@ -208,6 +215,8 @@ Appeals are considered by the DOAJ Appeals Committee, consisting of the Head of No further communication will be entered into after the appeal is heard and the Committee has made their decision. +Appeals or complaints that include abuse of DOAJ editors will not be considered. See our [Zero tolerance policy](https://doaj.org/about/#zero-tolerance-policy). DOAJ reserves the right to impose a penalty on publishers for such behaviour. + --- ## In other languages diff --git a/cms/pages/apply/seal.md b/cms/pages/apply/seal.md index 1a1e6984f3..21f1cb1984 100644 --- a/cms/pages/apply/seal.md +++ b/cms/pages/apply/seal.md @@ -50,7 +50,8 @@ All seven criteria must be met for a journal to be awarded the Seal. Failure to ## In other languages -[French](https://www.erudit.org/public/documents/sceauDOAJ.pdf) - hosted by Érudit +- [French](https://www.erudit.org/public/documents/sceauDOAJ.pdf) - hosted by Érudit +- [Portuguese](https://www.editoracubo.com.br/wp-content/uploads/2024/04/o-Selo-DOAJ.pdf) - hosted by Editora Cubo ## Version history diff --git a/cms/pages/apply/transparency.md b/cms/pages/apply/transparency.md index ec05c1d83a..022bb6f9fe 100644 --- a/cms/pages/apply/transparency.md +++ b/cms/pages/apply/transparency.md @@ -196,6 +196,9 @@ OASPA is a trade association that was established in 2008 in order to represent WAME is a global nonprofit voluntary association of editors of peer-reviewed medical journals who seek to foster cooperation and communication among editors; improve editorial standards; promote professionalism in medical editing through education, self-criticism, and self-regulation; and encourage research on the principles and practice of medical editing. WAME develops policies and recommendations of best practices for medical journal editors and has a syllabus for editors that members are encouraged to follow. +## License for this work + +[Principles of transparency and best practice in scholarly publishing](https://doi.org/10.24318/cope.2019.1.12) by COPE, DOAJ, OASPA, WAME is licensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/?ref=chooser-v1) ## In other languages diff --git a/cms/pages/docs/openurl.md b/cms/pages/docs/openurl.md index b9f1bda22a..538bcdac31 100644 --- a/cms/pages/docs/openurl.md +++ b/cms/pages/docs/openurl.md @@ -8,24 +8,57 @@ featuremap: ~~OpenURL:Fragment~~ --- -An OpenURL is similar to a web address, but instead of referring to a physical website, it refers to an article, book, patent, or other resource within a website. OpenURLs are similar to permalinks because they are permanently connected to a resource, regardless of which website the resource is connected to. (Retrieved from [Wikipedia](https://en.wikipedia.org/wiki/OpenURL).) +An OpenURL is similar to a web address, but instead of referring to a physical website, it refers to an article, book, patent, or other resource within a website. OpenURLs are similar to permalinks because they are permanently connected to a resource, regardless of which website the resource is located at. (Retrieved from [Wikipedia](https://en.wikipedia.org/wiki/OpenURL).) 
-The resource is retrieved using [a structured URL format.](https://alair.ala.org/bitstream/handle/11213/258/Open%20URL.pdf?sequence=103) +The resource is retrieved using [a structured URL format.](http://hdl.handle.net/11213/19012) On DOAJ, the parameters included in the request are passed to our search interface, which provides the top result. This means that using OpenURL isn't guaranteed to find your result 100% of the time, even if it exists. +## Parameter mapping + +Here is the mapping between OpenURL parameters and our Elasticsearch database fields. + +### Journal + +| Parameter | Elasticsearch field | +|-----------|---------------------------------| +| jtitle | index.title.exact | +| stitle | bibjson.alternative_title.exact | +| issn | index.issn.exact | +| eissn | index.issn.exact | +| isbn | index.title.exact | + +### Article + +| Parameter | Elasticsearch field | +|-----------|----------------------------------| +| aulast | bibjson.author.name.exact | +| aucorp | bibjson.author.affiliation.exact | +| atitle | bibjson.title.exact | +| jtitle | bibjson.journal.title.exact | +| date | bibjson.year.exact | +| volume | bibjson.journal.volume.exact | +| issue | bibjson.journal.number.exact | +| spage | bibjson.start_page.exact | +| epage | bibjson.end_page.exact | +| issn | index.issn.exact | +| eissn | index.issn.exact | +| isbn | index.title.exact | +| doi | index.doi.exact | + ## Improving results There are a few things you can try if you keep seeing the _Not Found_ page or getting the wrong result: {:.numbered-table .numbered-table--labels} -| | Troubleshooting tip | Details | -|---|-------------------------------------|-------------------------------------------------------------------------------------------------------| -| | Use a trustworthy field | Identifiers like `issn` are more reliable than free text like `title`. | -| | Make sure each parameter is correct | Ensure there are no typos or strange formatting and that the parameter labels are correct. | -| | Reduce constraints | Remove some parameters, like specific volume and issue because these may not be present in our index. | -| | Use OpenURL 1.0 | This will remove the rewriting step from the process (see below). | +| | Troubleshooting tip | Details | +|---|----------------------------------------------------------|--------------------------------------------------------------------------------------------------| +| | Use a trustworthy field | Identifiers like `issn` are more reliable than free text like `jtitle`. | +| | Make sure each parameter is correct | Ensure there are no typos or strange formatting and that the parameter labels are correct. | +| | Reduce constraints for article searches | Remove some parameters, like 'volume' or 'issue', because these may not be present in our index. | +| | Improve article search accuracy by using `genre=article` | URLs without this parameter will be directed to the journal page. | +| | Use OpenURL 1.0 | This will remove the rewriting step from the process (see below). | ## Supported OpenURL version -DOAJ prefers to receive OpenURL 1.0 requests. However, if the old "0.1" syntax is used, the DOAJ will rewrite it to the new syntax and try again. You will see a redirect to an OpenURL 1.0 URL, then the result. +DOAJ prefers to receive OpenURL 1.0 requests. However, if the old "0.1" syntax is used, the DOAJ will rewrite it to the new syntax and try again. You will see a redirect to an OpenURL 1.0 URL and then the result. 
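
To make the parameter mapping concrete, here is a minimal sketch (in Python, the project's language) of how a client might assemble an OpenURL 1.0 request against DOAJ. The `/openurl` endpoint path and the exact KEV key names are assumptions based on this page rather than confirmed API details:

```python
from urllib.parse import urlencode

# Minimal sketch: assemble an OpenURL 1.0 (KEV) journal lookup for DOAJ.
# The /openurl endpoint path is an assumption based on this documentation.
BASE_URL = "https://doaj.org/openurl"

params = {
    "url_ver": "Z39.88-2004",                       # marks the request as OpenURL 1.0
    "rft_val_fmt": "info:ofi/fmt:kev:mtx:journal",  # journal metadata format
    "rft.genre": "journal",                         # "article" targets article records instead
    "rft.issn": "1932-6203",                        # identifiers are more reliable than jtitle
}

print(f"{BASE_URL}?{urlencode(params)}")
```

Following the troubleshooting tips above, this sketch leans on the `issn` identifier rather than free-text fields, giving the search interface the best chance of returning the intended journal as its top result.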
diff --git a/cms/pages/legal/accessibility.md b/cms/pages/legal/accessibility.md
index 52a57dd620..1a9a784e4f 100644
--- a/cms/pages/legal/accessibility.md
+++ b/cms/pages/legal/accessibility.md
@@ -7,11 +7,11 @@
 
 ---
 
-This accessibility statement applies to [doaj.org](https://www.doaj.org) exclusively.
+This accessibility statement applies to [doaj.org](https://doaj.org) exclusively.
 
-Its subdomain [blog.doaj.org](https://blog.doaj.org) and its sister site, [doajbestpracticeguide.org](https://www.doajbestpracticeguide.org), are WordPress sites and **are not covered by this statement**.
+Its subdomain [blog.doaj.org](https://blog.doaj.org) and sister site [doajbestpracticeguide.org](https://www.doajbestpracticeguide.org) are WordPress sites and **are not covered by this statement**.
 
-This website is run by DOAJ, on behalf of IS4OA. We want as many people as possible to be able to use this website. For example, that means you should be able to:
+This website is run by DOAJ on behalf of IS4OA. We want as many people as possible to be able to use this website. For example, that means you should be able to:
 
 - Change colours, contrast levels and fonts.
 - Zoom in up to 300% without the text spilling off the screen.
@@ -30,8 +30,19 @@ We know some parts of this website may not be fully accessible:
 
 - You cannot use the journal or article search function with Javascript disabled.
   - However, if you are using a device that renders JavaScript, the HTML output will be accessible.
 - You cannot modify the line height or spacing of the text.
-- Some blog posts contain links to older PDF documents which are not fully accessible to screen reader software.
-- The [_Supporters_](/support/supporters) page contains an embedded map and a spreadsheet which may not be accessible to screen reader software.
+
+We have recently fixed:
+
+- Made sure all navbars are accessible for keyboard-only users
+- Radio buttons in the application form are now focusable
+- Search facets are configured for screen readers to indicate their state
+- All pages have a "Skip Links" button so the user can jump to the main content
+- In search results, the facets are operable with a keyboard
+
+Fixes in progress, following a review against WCAG 2.2:
+
+- We are changing the ReCAPTCHA method to a honeypot method
+- On our forms, we are starting to use aria-describedby for hint/help text
 
 We are continuing to update this document as we find accessibility issues or as we fix them.
 
@@ -41,30 +52,25 @@ If you need information on this website in a different format like accessible PD
 
 - **Email** [helpdesk@doaj.org](mailto:helpdesk@doaj.org)
 
-We'll consider your request and get back to you in 5 working days. Unfortunately, DOAJ doesn't have an organisational telephone number.
-
-If you cannot view the map or the embedded spreadsheet on our [_Supporters_](/support/supporters) page, and have a specific question about either of those, call or email us.
+We'll consider your request and reply within 5 working days. Unfortunately, DOAJ doesn't have an organisational telephone number.
 
 ## Reporting accessibility problems with this website
 
-We’re always looking to improve the accessibility of this website. 
If you find any problems not listed on this page or think we're not meeting accessibility requirements, you can do either of the following:
-
-- Submit a [GitHub issue](https://github.com/DOAJ/doaj/issues/new/choose)
-- **Email** Dom Mitchell, DOAJ Operations Manager, [dom@doaj.org](mailto:dom@doaj.org)
+We’re always looking to improve the accessibility of this website. If you find any problems not listed on this page or think we're not meeting accessibility requirements, you can submit a [GitHub issue](https://github.com/DOAJ/doaj/issues/new/choose) or email our Helpdesk: [helpdesk@doaj.org](mailto:helpdesk@doaj.org)
 
 ## Enforcement procedure
 
-The Equality and Human Rights Commission (EHRC) is responsible for enforcing the Public Sector Bodies (Websites and Mobile Applications) (No. 2) Accessibility Regulations 2018 (the 'accessibility regulations'). If you're not happy with how we respond to your complaint, contact the Equality Advisory and Support Service (EASS).
+The Equality and Human Rights Commission (EHRC) is responsible for enforcing the Public Sector Bodies (Websites and Mobile Applications) (No. 2) Accessibility Regulations 2018 (the 'accessibility regulations'). If you're unhappy with how we respond to your complaint, contact the Equality Advisory and Support Service (EASS).
 
 ---
 
-## Technical information about this website’s accessibility
+## Technical information about this website's accessibility
 
-DOAJ is committed to making its website accessible, in accordance with the Public Sector Bodies (Websites and Mobile Applications) (No. 2) Accessibility Regulations 2018.
+DOAJ is committed to making its website accessible in accordance with the Public Sector Bodies (Websites and Mobile Applications) (No. 2) Accessibility Regulations 2018.
 
 ### Compliance status
 
-This website is partially compliant with the [Web Content Accessibility Guidelines version 2.1 AA standard](https://www.w3.org/TR/WCAG21/) subject to the non-compliances listed below.
+This website is partially compliant with the [Web Content Accessibility Guidelines version 2.2 AA standard](https://www.w3.org/TR/WCAG22/) subject to the non-compliances listed below.
 
 ## Non-accessible content
 
 The content listed below is non-accessible for the following reasons.
 
 - Some images may not have a text alternative, so people using a screen reader cannot access the information. This fails WCAG 2.1 Success Criterion 1.1.1 (Non-text content).
 - Some form fields may be missing a label identifying the purpose of its corresponding field. This fails WCAG Success Criterion 1.3.5 (Identify Input Purpose).
-
-We plan to fix these issues to meet accessibility standards by January 2021.
+- We make users solve, recall, or transcribe something to log in. This fails WCAG Success Criterion 3.3.8 (Accessible Authentication).
 
 ### Disproportionate burden
 
 - Navigation and accessing information: it's not always possible to change the device orientation from horizontal to vertical without making it more difficult to view the content. This fails WCAG Success Criterion 1.3.4 (Orientation).
 - Navigation and accessing information: there's no way to skip the repeated content in the page header (for example, a 'skip to main content' option).
 
-### Content that’s not within the scope of the accessibility regulations
-
-- PDFs and other documents: we have PDFs that were published in older blog posts but are not essential to our services (searching the index and applying to the index). 
The accessibility regulations [do not require us to fix PDFs or other documents published before 23 September 2018](https://www.legislation.gov.uk/uksi/2018/952/regulation/4/made) if they're not essential to providing our services. Any new PDFs or Word documents we publish will meet accessibility standards. - - ### Preparation of this accessibility statement -This statement was prepared on **28 May 2020**. It was last reviewed on **20 December 2021**. +This statement was prepared on **28 May 2020**. It was last reviewed on **06 September 2024** against the WCAG 2.2 accessibility guidelines. -This website was last tested on **24 November 2020**. Testing was carried out by the DOAJ team. +This website was last tested on **24 November 2020**. The DOAJ team carried out testing. -#### Changes to this document +## Changes to this document -- 20 Dec 2021: updated target date to meet accessibility standards to Jan 2021. +- 06 September 2024: added details on things we have fixed; removed references to the PDFs and the blog, which are not covered by this statement; added the last point to the Non-compliance section; updated compliance status. +- 20 December 2021: updated target date to meet accessibility standards to Jan 2021. diff --git a/cms/pages/legal/privacy.md b/cms/pages/legal/privacy.md index 2495f41017..e0ceca6d47 100644 --- a/cms/pages/legal/privacy.md +++ b/cms/pages/legal/privacy.md @@ -183,7 +183,7 @@ To request that DOAJ delete all of the personal data we hold about you, please e #### 9a What is a subject access request (SAR)? -An SAR is the name given to the process by which a user can request to know details of a site's information about them and how it is being used. A full explanation is given here: [https://ico.org.uk/for-organisations/guide-to-data-protection/principle-6-rights/subject-access-request/](https://ico.org.uk/for-organisations/guide-to-data-protection/principle-6-rights/subject-access-request/) but in summary: 'an individual who makes a written request and pays a fee is entitled to be: told whether any personal data is being processed; given a description of the personal data, the reasons it is being processed, and whether it will be given to any other organisations or people; given a copy of the information comprising the data; and given details of the source of the data (where this is available)'. According to UK law, the recipient organisation of a SAR must respond within 40 calendar days. +An SAR is the name given to the process by which a user can request to know details of a site's information about them and how it is being used. A useful guide on how to make a SAR is available here: [https://ico.org.uk/for-the-public/make-a-subject-access-request/](https://ico.org.uk/for-the-public/make-a-subject-access-request/). In summary: 'an individual who makes a written request and pays a fee is entitled to be: told whether any personal data is being processed; given a description of the personal data, the reasons it is being processed, and whether it will be given to any other organisations or people; given a copy of the information comprising the data; and given details of the source of the data (where this is available)'. According to UK law, the recipient organisation of a SAR must respond within 40 calendar days.
#### 9b How to make a SAR to DOAJ diff --git a/cms/pages/legal/terms.md b/cms/pages/legal/terms.md index 1b27b66e99..9df6a74530 100644 --- a/cms/pages/legal/terms.md +++ b/cms/pages/legal/terms.md @@ -25,9 +25,9 @@ DOAJ uses a variety of licenses for the different parts of its website and the c + In the [full data dump of all journal metadata](/docs/public-data-dump/) + As the [list of journals which say they are in DOAJ but are not](https://docs.google.com/spreadsheets/d/1Y_Sza4rPDkf-NNX9kwiErGrKeNTM75md9B63A_gVpaQ/edit?usp=sharing) -3. The *article metadata* provided to us by publishers or that we collect from EuropePMC, which we then process and display, is availble with a [CC0 waiver](https://creativecommons.org/share-your-work/public-domain/cc0/). Collecting, using and reusing part or all of the article-level metadata must be done in accordance with the terms of the CC0 license. The article-level metadata is available: +3. The *article metadata* provided to us by publishers or that we collect from EuropePMC, which we then process and display, is available with a [CC0 waiver](https://creativecommons.org/share-your-work/public-domain/cc0/). Collecting, using and reusing part or all of the article-level metadata must be done under the terms of the CC0 waiver. The article-level metadata is available: - + On individual journal pages in DOAJ, for example: [https://doaj.org/toc/1932-6203](/toc/1932-6203) + + On individual journal pages in DOAJ, for example [https://doaj.org/toc/1932-6203](/toc/1932-6203) + Via our API + In our [OAI-PMH feed](/docs/oai-pmh) + In the [full data dump of all article metadata](/docs/public-data-dump/). @@ -46,13 +46,13 @@ DOAJ uses a variety of licenses for the different parts of its website and the c --- ### Conditions of using this website -8. Use of this website is subject to the following Terms and Conditions. By using this website you agree to be bound by these Terms and Conditions which form a binding contract between you and [Infrastructure Services for Open Access C.I.C.](https://is4oa.org/), the company responsible for providing the DOAJ (Directory of Open Access Journals) service. +8. Use of this website is subject to the following Terms and Conditions. By using this website, you agree to be bound by these Terms and Conditions, which form a binding contract between you and [Infrastructure Services for Open Access C.I.C.](https://is4oa.org/), the company responsible for providing the DOAJ (Directory of Open Access Journals) service. 9. You confirm that you have read and accept [our Privacy policy](/privacy/). 10. IS4OA reserves the right to suspend or terminate your account at any time without notice. Written notice will be provided twenty-four (24) hours in advance. -11. The name 'Directory of Open Access Journals' and the acronym 'DOAJ' are protected as trade names owned by IS4OA. Neither the name, the acronym nor the look and feel of the DOAJ website may be reproduced without the express prior written permission of the [DOAJ Managing Director](mailto:helpdesk@doaj.org). +11. The name 'Directory of Open Access Journals' and the acronym 'DOAJ' are protected as trade names owned by IS4OA. Neither the name, the acronym, nor the look and feel of the DOAJ website may be reproduced without the express prior written permission of the [DOAJ Managing Director](mailto:helpdesk@doaj.org). 12. 
In no event shall IS4OA, or its employees, ambassadors, volunteers or contractors be liable for any damages of any nature, including without limitation any consequential loss, loss of income or profit, loss of or damage to property, claims of third parties, or any other loss, cost, claim or expense of any kind or character arising out of or in connection with the use of this website, its content or any website with which it is linked. This exclusion and limitation only applies to the extent permitted by law and does not apply to liability for death or personal injury caused by the negligence of IS4OA, its employees, ambassadors, volunteers or contractors. @@ -63,7 +63,7 @@ DOAJ uses a variety of licenses for the different parts of its website and the c --- ### Licensing terms for content published on DOAJ News Service -15. DOAJ News Service is the DOAJ blog, hosted on Wordpress: https://blog.doaj.org +15. DOAJ News Service is the DOAJ blog, hosted on WordPress: https://blog.doaj.org 16. All content posted on the blog is licensed under the CC BY-NC Creative Commons license. See the blog footer for full details. diff --git a/cms/pages/preservation/index.md b/cms/pages/preservation/index.md index d13b3bfe85..f08a60a014 100644 --- a/cms/pages/preservation/index.md +++ b/cms/pages/preservation/index.md @@ -1,6 +1,6 @@ --- layout: sidenav -title: Project JASPER +title: JASPER preservation service section: digital preservation toc: true sticky_sidenav: true @@ -11,11 +11,11 @@ featuremap: ~~Preservation:Fragment~~ [Esta página está disponible en Español](https://docs.google.com/document/d/1dCxZYO0HDmFWMyazbkayZJtpCYkO9AIf0xqw-z55rU0/edit?usp=sharing). - Project JASPER (JournAlS are Preserved forevER) is an initiative to preserve open access journals. It was launched on [World Preservation Day 2020](https://www.dpconline.org/events/world-digital-preservation-day) and is in response to research* that shows that online journals—both open and closed access journals—can disappear from the internet. + JASPER (JournAlS are Preserved forevER) is an initiative to preserve open access journals. Originally a project, it was launched on [World Preservation Day 2020](https://www.dpconline.org/events/world-digital-preservation-day) in response to research* showing that online journals—both open and closed access—can disappear from the internet. Long-term archiving of scholarship is of paramount importance, but too often, publishers don't realise this or don't have the resources to do anything about it. Authors want to ensure their contributions to the scholarly record will be permanent. Scholars must be able to access all of the published research in their fields, both now and long into the future. -As a scholarly community, we are pledged to eliminate the possibility that high-value resources can disappear*. Project JASPER aims to close the gap in preservation coverage among open access journals. +As a scholarly community, we have pledged to eliminate the possibility that high-value resources can disappear*. JASPER aims to close the gap in preservation coverage among open access journals. Questions? [Email us](mailto:preservation@doaj.org). @@ -24,9 +24,9 @@ _*References_ 1. M. Laakso, M. Matthias, N. Jahn. _Open is not forever: A study of vanished open access journals_. Journal of the Association for Information Science and Technology, Feb. 2021. [https://doi.org/10.1002/asi.24460](https://doi.org/10.1002/asi.24460) 2. J. Bosman et al. _OA Diamond Journals Study_.
[https://zenodo.org/record/4558704](https://zenodo.org/record/4558704) -## The project +## The service -The premise of the JASPER process is that all journals are different, with different needs, different priorities and different resources. The workflow offers viable alternatives to journals so that they engage with one or more archiving services that meet their requirements and capacities. The JASPER process is simple: +The premise of JASPER is that all journals are different, with different needs, priorities and resources. The workflow offers viable alternatives to journals so that they engage with one or more archiving services that meet their requirements and capacities. The process is simple: 1. work out which archiving option might be the best fit for the publisher 2. establish the level where the amount of effort is manageable by the publisher, based on: @@ -46,7 +46,7 @@ We hope that many DOAJ-indexed publishers will want to take part but to ensure o - Your content is licensed with a CC BY licence (sometimes, we will accept less open licenses). - The copyright of published content remains with the author. -We will analyse the financial status of journals or publishers quite closely. We reserve the right not to include a journal or a publisher in JASPER if we think it is or could be supported or funded by an organisation capable of covering the costs of long-term preservation. This is a necessary step while JASPER remains unfunded. +DOAJ is committed to providing the JASPER service free of charge. Because of this, we will analyse the financial status of journals or publishers quite closely. We reserve the right not to include a journal or a publisher in JASPER if we think it is or could be supported or funded by an organisation capable of covering the costs of long-term preservation. This is a necessary step while JASPER remains free of charge. ## To apply @@ -64,7 +64,7 @@ Si su revista cumple con los criterios anteriores, [puede presentar su solicitud 3. *The web-crawling route*: if you cannot or do not want to export article metadata and full text, your journal website details will be provided to the Internet Archive for inclusion in a best-effort, automated web harvesting. ### More copies, better preservation -It is always safer for a journal’s content to be archived in more than one place, ideally, in at least three. To simplify this for smaller publishers, journals opting for Route 2 will automatically have their content preserved via Route 3. Future phases of the project will allow Route 1 journals to participate in other routes, too. +It is always safer for a journal’s content to be archived in more than one place, ideally, in at least three. To simplify this for smaller publishers, journals opting for Route 2 will automatically have their content preserved via Route 3. Future plans will allow Route 1 journals to participate in other routes, too. ## For the Keepers There are often costs associated with adding new journals to a preservation service. JASPER aims to significantly reduce these costs by using DOAJ as a common interface to the thousands of journals indexed in it. By delivering content to an FTP server hosted by Internet Archive, content deliveries are unified into a single format and available from a single location. If you want to join JASPER, [get in touch](mailto:preservation@doaj.org).
@@ -72,7 +72,7 @@ There are often costs associated with adding new journals to a preservation serv ## For libraries and universities Long-term digital preservation is a profoundly important mission for libraries, universities, and other memory organisations. By [supporting JASPER](https://doaj.org/support/), you are helping us preserve valuable scholarly content for the long term and in a very cost-effective way. Thank you for your continuing support! -## About the project partners +## About the service partners ### CLOCKSS [CLOCKSS](https://clockss.org/) is a not-for-profit collaboration of leading academic publishers and research libraries. Our mission is to ensure the long-term survival of digital scholarly content and instil confidence in authors, scholars, policymakers, libraries and publishers worldwide that their content will be safely and securely preserved for future generations. We are entrusted with the long-term preservation of more than 51 million journal articles and 400,000 scholarly books. CLOCKSS operates [12 archive nodes](https://clockss.org/archive-nodes/) at leading academic institutions worldwide. This secure, robust, and decentralized infrastructure can withstand threats from technological, economic, environmental, and political failures. A destructive event in one location doesn’t jeopardize the survival of preserved digital content because the 11 other locations serve as mirror sites to back up and repair the disrupted location’s archive. Content is triggered from our dark archive if it disappears from the web and is made available to everyone under an Open Access license. @@ -87,4 +87,4 @@ DOAJ is the largest, most diverse, free directory of open access journals in the [Keepers Registry](https://keepers.issn.org) is the global monitor on the archiving arrangements for continuing resources managed by the ISSN International Centre and fuelled by reports from contributing archiving agencies. ### PKP -A research and development initiative within Simon Fraser University’s Core Facilities Program, the [Public Knowledge Project](https://pkp.sfu.ca/) (PKP) develops (free) open-source software. It carries out research to improve the quality and reach of scholarly publishing. As part of its mandate to make knowledge public, PKP understands that preserving and maintaining long-term access to published content is paramount. To support this, PKP has launched a [Preservation Network](https://pkp.sfu.ca/pkp-pn/), a freely available private LOCKSS network that can be used by any OJS journal running an up-to-date version of OJS. PKP welcomes the opportunity to participate in Project JASPER with our colleagues at DOAJ, Keepers Registry, Internet Archive and CLOCKSS. +A research and development initiative within Simon Fraser University’s Core Facilities Program, the [Public Knowledge Project](https://pkp.sfu.ca/) (PKP) develops (free) open-source software. It carries out research to improve the quality and reach of scholarly publishing. As part of its mandate to make knowledge public, PKP understands that preserving and maintaining long-term access to published content is paramount. To support this, PKP has launched a [Preservation Network](https://pkp.sfu.ca/pkp-pn/), a freely available private LOCKSS network that can be used by any OJS journal running an up-to-date version of OJS. PKP welcomes the opportunity to participate in JASPER with our colleagues at DOAJ, Keepers Registry, Internet Archive and CLOCKSS. 
diff --git a/cms/pages/support/index.md b/cms/pages/support/index.md index a0840b562c..8132c05557 100644 --- a/cms/pages/support/index.md +++ b/cms/pages/support/index.md @@ -10,11 +10,11 @@ featuremap: --- -Support of DOAJ by academic organisations is vital, and we are proud that we can state that over 80% of our support comes to us this way. We are very grateful to all our supporting academic organisations worldwide. +Academic organisations' support of DOAJ is vital, and we are proud that over 80% of our support comes from such organisations. We are very grateful to all our supporting academic organisations. -### 2024 pricing +### 2024-2026 pricing -For 2024, we have revised and simplified our supporter model to align with the levels recommended by SCOSS. This new model enables us to invest in the organisation's future and to continue to provide a high-quality service to our community. +For 2024, we have revised and simplified our supporter model to align with the levels recommended by SCOSS. This model enables us to invest in the organisation's future and continue providing a high-quality service to our community. | | Euros(€) | USDs($) | GBPs(£) | |---------------------|----------|---------|---------| @@ -22,7 +22,7 @@ For 2024, we have revised and simplified our supporter model to align with the l | Small organisations | 2,000 | 2,200 | 1,720 | | Organisations from [low- and middle-income countries](https://datatopics.worldbank.org/world-development-indicators/the-world-by-income-and-region.html) | 500 | 550 | 430 | -A 30% discount will be applied to institutions supporting via a billing consortium. Please [contact us](/contact/) for further information. +In 2024, a 30% discount will be applied to institutions supporting us via a billing consortium. The discount will be reduced to 25% for 2025 and 2026. Please [contact us](/contact/) for further information. We always have a wishlist of development projects for which we require additional funding. Please contact us if you would like to support us over and above our standard rates. diff --git a/cms/pages/support/publisher-supporters.md b/cms/pages/support/publisher-supporters.md index 3b9b593675..5d0cf13f95 100644 --- a/cms/pages/support/publisher-supporters.md +++ b/cms/pages/support/publisher-supporters.md @@ -63,8 +63,6 @@ We are particularly grateful to those publishers who can contribute over and abo Please [contact us](/contact/) to discuss further. -## Publisher supporters +## Supporting publishers, aggregators, and other organisations
{% include '/data/sponsors.html' %}
- -## Other publisher supporters diff --git a/cms/sass/base/_general.scss b/cms/sass/base/_general.scss index a542529c29..4cb9a41b33 100644 --- a/cms/sass/base/_general.scss +++ b/cms/sass/base/_general.scss @@ -17,6 +17,7 @@ h2, h3, h4, h5, h6 { margin: $spacing-04 0 $spacing-03 0; line-height: 1.05; font-weight: 400; + scroll-margin-top: 60px; &:first-child { margin-top: 0; @@ -27,6 +28,7 @@ h1 { margin: $spacing-04 0; font-weight: 700; line-height: 1.05; + scroll-margin-top: 600px; @include typescale-01; @include font-serif; } diff --git a/cms/sass/pages/_homepage.scss b/cms/sass/pages/_homepage.scss index 68f41d3794..644d4194a1 100644 --- a/cms/sass/pages/_homepage.scss +++ b/cms/sass/pages/_homepage.scss @@ -126,7 +126,7 @@ } article { - margin-bottom: $spacing-04; + margin-bottom: $spacing-05; a { display: block; @@ -134,7 +134,7 @@ border-top: 1px solid $grapefruit; } - &:nth-of-type(4) { + &:nth-of-type(5) { clear: both; // Make sure fifth blog post starts on new line } } diff --git a/data_import_settings/dev_extras.json b/data_import_settings/dev_extras.json index 8525b6e9d2..d863c8c6f3 100644 --- a/data_import_settings/dev_extras.json +++ b/data_import_settings/dev_extras.json @@ -19,6 +19,7 @@ "preserve" : {"import" : true, "limit" : -1}, "provenance" : {"import" : true, "limit" : 10000}, "upload" : {"import" : true, "limit" : 10000}, + "datalog_journal_added": {"import" : true, "limit" : -1}, "article" : {"import" : true, "limit" : 100000} } } \ No newline at end of file diff --git a/deploy/deploy.sh b/deploy/deploy.sh index 44cad4d7a9..70937fd127 100755 --- a/deploy/deploy.sh +++ b/deploy/deploy.sh @@ -55,7 +55,7 @@ if test -f "cms/error_sass.txt"; then exit 1 fi -# Restart all supervisor tasks, will cover the app and kafka, plus huey on the background server. Then reload nginx. +# Restart all supervisor tasks, which will cover the app plus huey on the background server. Then reload nginx. sudo supervisorctl update sudo supervisorctl restart all || sudo supervisorctl start all diff --git a/deploy/kafka_2.13-3.1.0/server.properties b/deploy/kafka_2.13-3.1.0/server.properties deleted file mode 100644 index 0ba2bafde5..0000000000 --- a/deploy/kafka_2.13-3.1.0/server.properties +++ /dev/null @@ -1,150 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker.
-broker.id=0 - -# Configs to reduce system resource use -default.replication.factor=1 -auto.create.topics.enable=false -queued.max.requests=200 -background.threads=4 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -#num.network.threads=3 -num.network.threads=2 - -# The number of threads that the server uses for processing requests, which may include disk I/O -#num.io.threads=8 -num.io.threads=4 - -# The send buffer (SO_SNDBUF) used by the socket server -#socket.send.buffer.bytes=102400 -socket.send.buffer.bytes=51200 - -# The receive buffer (SO_RCVBUF) used by the socket server -#socket.receive.buffer.bytes=102400 -socket.receive.buffer.bytes=51200 - -# The maximum size of a request that the socket server will accept (protection against OOM) -#socket.request.max.bytes=104857600 -socket.request.max.bytes=10485760 - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -#num.partitions=1 -num.partitions=2 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. 
-# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -##log.flush.interval.messages=10000 -log.flush.interval.messages=1000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -#log.retention.hours=168 -log.retention.hours=72 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=18000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. 
-group.initial.rebalance.delay.ms=0 diff --git a/deploy/nginx/doaj-redirects.map b/deploy/nginx/doaj-redirects.map index 79e4fec819..78b7ecf93c 100644 --- a/deploy/nginx/doaj-redirects.map +++ b/deploy/nginx/doaj-redirects.map @@ -20,7 +20,7 @@ default ""; ~*^/publishermembers/?$ /support/publisher-supporters; ~*^/members/?$ /support/supporters; ~*^/scoss/?$ /support/supporters; -~*^/dismiss_site_note?continue=/news https://blog.doaj.org; -~*^/news https://blog.doaj.org; +~*^/dismiss_site_note?continue=/news$ https://blog.doaj.org; +~*^/news/?$ https://blog.doaj.org; ~*^/admin/suggestions(?.*)/?$ /admin/applications$q; ~*^/admin/suggestion/(?.*)/?$ /admin/application/$id; \ No newline at end of file diff --git a/deploy/supervisor/production/kafka-consumer.conf b/deploy/supervisor/production/kafka-consumer.conf deleted file mode 100644 index 809b2a1d33..0000000000 --- a/deploy/supervisor/production/kafka-consumer.conf +++ /dev/null @@ -1,9 +0,0 @@ -[program:kafka-consumer] -command=/home/cloo/doaj/bin/python -m faust -A portality.events.kafka_consumer worker -l info -environment= DOAJENV=production -user=cloo -directory=/home/cloo/doaj/src/doaj -stdout_logfile=/var/log/supervisor/%(program_name)s-stdout.log -stderr_logfile=/var/log/supervisor/%(program_name)s-error.log -autostart=true -autorestart=true diff --git a/deploy/supervisor/test/kafka-consumer.conf b/deploy/supervisor/test/kafka-consumer.conf deleted file mode 100644 index 78abc65d9c..0000000000 --- a/deploy/supervisor/test/kafka-consumer.conf +++ /dev/null @@ -1,9 +0,0 @@ -[program:kafka-consumer] -command=/home/cloo/doaj/bin/python -m faust -A portality.events.kafka_consumer worker -l info -environment= DOAJENV=test -user=cloo -directory=/home/cloo/doaj/src/doaj -stdout_logfile=/var/log/supervisor/%(program_name)s-stdout.log -stderr_logfile=/var/log/supervisor/%(program_name)s-error.log -autostart=true -autorestart=true diff --git a/deploy/systemd/README.md b/deploy/systemd/README.md deleted file mode 100644 index c930ad11ea..0000000000 --- a/deploy/systemd/README.md +++ /dev/null @@ -1,6 +0,0 @@ -## NOTE - -These systemd should be symlinked to `/etc/systemd/system/`. - -Notice the path to the configs `*.properties` are locally versioned to the kafka -installation directory. 
\ No newline at end of file diff --git a/deploy/systemd/kafka.service b/deploy/systemd/kafka.service deleted file mode 100644 index 79357e8608..0000000000 --- a/deploy/systemd/kafka.service +++ /dev/null @@ -1,15 +0,0 @@ -[Unit] -Requires=zookeeper.service -After=zookeeper.service -Description=High-available, distributed message broker -After=network.target - -[Service] -Type=simple -User=cloo -ExecStart=/bin/sh -c '/home/cloo/kafka_2.13-3.1.0/bin/kafka-server-start.sh /home/cloo/kafka_2.13-3.1.0/config/server.properties' -ExecStop=/home/cloo/kafka_2.13-3.1.0/bin/kafka-server-stop.sh -Restart=on-abnormal - -[Install] -WantedBy=multi-user.target \ No newline at end of file diff --git a/deploy/systemd/zookeeper.service b/deploy/systemd/zookeeper.service deleted file mode 100644 index b6de4529a9..0000000000 --- a/deploy/systemd/zookeeper.service +++ /dev/null @@ -1,13 +0,0 @@ -[Unit] -Requires=network.target remote-fs.target -After=network.target remote-fs.target - -[Service] -Type=simple -User=cloo -ExecStart=/home/cloo/kafka_2.13-3.1.0/bin/zookeeper-server-start.sh /home/cloo/kafka_2.13-3.1.0/config/zookeeper.properties -ExecStop=/home/cloo/kafka_2.13-3.1.0/bin/zookeeper-server-stop.sh -Restart=on-abnormal - -[Install] -WantedBy=multi-user.target \ No newline at end of file diff --git a/doajtest/fixtures/accounts.py b/doajtest/fixtures/accounts.py index a36dd60c6f..c845bd83e3 100644 --- a/doajtest/fixtures/accounts.py +++ b/doajtest/fixtures/accounts.py @@ -1,5 +1,7 @@ from copy import deepcopy +from portality import models + class AccountFixtureFactory(object): @staticmethod @@ -87,3 +89,12 @@ def create_maned_a(): from portality import models maned = models.Account(**AccountFixtureFactory.make_managing_editor_source()) return maned + + +def create_publisher_account__a(acc_id="testowner", is_save=True, blocking=True) -> models.Account: + acc_src = AccountFixtureFactory.make_publisher_source() + account = models.Account(**acc_src) + account.set_id(acc_id) + if is_save: + account.save(blocking=blocking) + return account diff --git a/doajtest/fixtures/article.py b/doajtest/fixtures/article.py index f47cd18b08..fbee1726fa 100644 --- a/doajtest/fixtures/article.py +++ b/doajtest/fixtures/article.py @@ -1,9 +1,12 @@ -import os +import datetime +import random +from copy import deepcopy +from typing import Dict, Iterable + import rstr from doajtest import test_constants from portality.regex import ISSN_COMPILED, DOI_COMPILED -from copy import deepcopy ARTICLES = test_constants.PATH_RESOURCES / "article_uploads.xml" @@ -85,7 +88,7 @@ def make_many_article_sources(count=2, in_doaj=False, pissn=None, eissn=None): # now some very quick and very dirty date generation fakemonth = i % 12 + 1 - template['created_date'] = "2000-0{fakemonth}-01T00:00:00Z".format(fakemonth=fakemonth) + template['created_date'] = f"{random.randint(2013, 2024)}-{fakemonth:02}-01T00:00:00Z" # Remove template ISSNs and add new ones template['bibjson']['identifier'] = [] @@ -100,7 +103,7 @@ def make_many_article_sources(count=2, in_doaj=False, pissn=None, eissn=None): return article_sources @staticmethod - def make_incoming_api_article(doi=None, fulltext=None): + def make_incoming_api_article(doi=None, fulltext=None) -> Dict: template = deepcopy(ARTICLE_SOURCE) template['bibjson']['journal']['start_page'] = template['bibjson']['start_page'] template['bibjson']['journal']['end_page'] = template['bibjson']['end_page'] @@ -127,9 +130,32 @@ def make_incoming_api_article(doi=None, fulltext=None): if not set_fulltext: 
template["bibjson"]["link"].append({"type": "fulltext", "url": fulltext}) - return deepcopy(template) + @staticmethod + def make_bulk_incoming_api_article(count=2, doi=None, fulltext=None) -> Iterable[Dict]: + def find_dict_in_list(lst, key, value): + for i, dic in enumerate(lst): + if dic[key] == value: + return i + return -1 + + for i in range(1, count + 1): + data = ArticleFixtureFactory.make_incoming_api_article(doi=doi, fulltext=fulltext) + # change the DOI and fulltext URLs to escape duplicate detection + # and try with multiple articles + doi_ix = find_dict_in_list(data['bibjson']['identifier'], 'type', 'doi') + if doi_ix == -1: + data['bibjson']['identifier'].append({"type": "doi"}) + data['bibjson']['identifier'][doi_ix]['id'] = '10.0000/SOME.IDENTIFIER.{0}'.format(i) + + fulltext_url_ix = find_dict_in_list(data['bibjson']['link'], 'type', 'fulltext') + if fulltext_url_ix == -1: + data['bibjson']['link'].append({"type": "fulltext"}) + data['bibjson']['link'][fulltext_url_ix]['url'] = 'http://www.example.com/article_{0}'.format(i) + + yield deepcopy(data) + @staticmethod def make_article_apido_struct(): return deepcopy(ARTICLE_STRUCT) @@ -178,9 +204,9 @@ def make_article_apido_struct(): " enough time to release its distinctive flavour, but without overpowering the delicate vanilla. ", "author": [ { - "name" : "The Author", - "affiliation" : "University Cottage Labs", - "orcid_id" : "https://orcid.org/0000-0001-1234-1234" + "name": "The Author", + "affiliation": "University Cottage Labs", + "orcid_id": "https://orcid.org/0000-0001-1234-1234" }, ], "keywords": ["word", "key"], diff --git a/doajtest/fixtures/article_doajxml.py b/doajtest/fixtures/article_doajxml.py index 185fdb9340..5886ede5fe 100644 --- a/doajtest/fixtures/article_doajxml.py +++ b/doajtest/fixtures/article_doajxml.py @@ -115,3 +115,9 @@ def invalid_orcid_id(cls): @classmethod def upload_the_same_issns(cls): return cls._response_from_xpath("//record[journalTitle='2 The Same ISSNs']") + + +def to_articles(article_file_handle: BytesIO): + from portality.crosswalks.article_doaj_xml import DOAJXWalk + articles = DOAJXWalk().crosswalk_file(file_handle=article_file_handle, add_journal_info=False) + return articles diff --git a/doajtest/fixtures/urls.py b/doajtest/fixtures/urls.py index df3ceb8949..c9761ec42e 100644 --- a/doajtest/fixtures/urls.py +++ b/doajtest/fixtures/urls.py @@ -7,7 +7,8 @@ "http://ftp.example.com/file%20name.txt", "https://revistalogos.policia.edu.co:8443/index.php/rlct/about", "https://revistalogos.policia.edu.co:65535/index.php/rlct/about", - "https://revistalogos.policia.edu.co:0/index.php/rlct/about" + "https://revistalogos.policia.edu.co:0/index.php/rlct/about", + 'http://example-xxxx.xx:999/a/b/c?d=1&f=2#qq', ] INVALID_URL_LISTS = [ diff --git a/doajtest/functional/bulk_async_articles.py b/doajtest/functional/bulk_async_articles.py new file mode 100644 index 0000000000..c91da60a8c --- /dev/null +++ b/doajtest/functional/bulk_async_articles.py @@ -0,0 +1,22 @@ +""" +Quick manual test of the asynchronous article bulk article create API. + +To run this test first activate the testdrive `/testdrive/publisher_with_journal`, then +insert the API key in the `API_KEY` variable below. Then you can execute this script +to see articles being bulk created, and the status response. 
+""" + +import requests, json +from doajtest.fixtures.article import ArticleFixtureFactory + +API_KEY = "606d4a0a92ac432f9f86d05ddf8d381b" + +# make some articles to bulk load (Use quite a few to justify the async) + +#TODO: we could use the testdrive fixtures to seed the fixtures below (i.e. a dynamic ISSN) +articles = ArticleFixtureFactory.make_many_article_sources(100, in_doaj=True, eissn="1x11-1111", pissn="2222-2222") +data = json.dumps(articles) + +resp = requests.post(f"https://testdoaj.cottagelabs.com/api/bulk/articles?api_key={API_KEY}", data=data) +print(resp.status_code) +print(resp.text) diff --git a/doajtest/functional/make_notifications.py b/doajtest/functional/make_notifications.py index 1e9b6c4f35..3d5aeea0c9 100644 --- a/doajtest/functional/make_notifications.py +++ b/doajtest/functional/make_notifications.py @@ -1,5 +1,11 @@ # ~~Notifications:FunctionalTest~~ -from portality import models + +from doajtest.fixtures.v2.applications import ApplicationFixtureFactory +from doajtest.fixtures.v2.journals import JournalFixtureFactory +from doajtest.mocks.mock_mail import MockMail +from portality import constants +from portality import models, app_email +from portality.core import app from portality.events.consumers import application_assed_assigned_notify, \ application_assed_inprogress_notify, \ application_editor_completed_notify, \ @@ -18,11 +24,9 @@ update_request_publisher_accepted_notify, \ update_request_publisher_assigned_notify, \ update_request_publisher_rejected_notify -from portality.models.event import Event +from portality.events.consumers.update_request_publisher_submitted_notify import UpdateRequestPublisherSubmittedNotify from portality.models import EditorGroup -from doajtest.fixtures.v2.applications import ApplicationFixtureFactory -from doajtest.fixtures.v2.journals import JournalFixtureFactory -from portality import constants +from portality.models.event import Event USER = "richard" @@ -44,9 +48,12 @@ "journal_editor_group_assigned_notify", "update_request_publisher_accepted_notify", "update_request_publisher_assigned_notify", - "update_request_publisher_rejected_notify" + "update_request_publisher_rejected_notify", + UpdateRequestPublisherSubmittedNotify.ID, ] +app.config["ENABLE_EMAIL"] = True +app_email.Mail = MockMail ############################################## ## ApplicationAssedAssignedNotify @@ -62,7 +69,6 @@ aaan = application_assed_assigned_notify.ApplicationAssedAssignedNotify() aaan.consume(event) - ############################################## ## ApplicationAssedAssignedNotify if "application_assed_inprogress_notify" in NOTIFICATIONS: @@ -77,7 +83,6 @@ aain = application_assed_inprogress_notify.ApplicationAssedInprogressNotify() aain.consume(event) - ############################################## ## ApplicationEditorCompletedNotify if "application_editor_completed_notify" in NOTIFICATIONS: @@ -86,6 +91,7 @@ def editor_group_mock_pull(editor_group_id): "editor": USER }) + eg_pull = EditorGroup.pull EditorGroup.pull = editor_group_mock_pull @@ -109,6 +115,7 @@ def editor_group_mock_pull(key, value): "editor": USER }) + eg_pull = EditorGroup.pull_by_key EditorGroup.pull_by_key = editor_group_mock_pull @@ -124,7 +131,6 @@ def editor_group_mock_pull(key, value): EditorGroup.pull_by_key = eg_pull - ############################################## ## ApplicationEditorInprogressNotify if "application_editor_inprogress_notify" in NOTIFICATIONS: @@ -133,6 +139,7 @@ def editor_group_mock_pull(editor_group_id): "editor": USER }) + eg_pull = 
EditorGroup.pull EditorGroup.pull = editor_group_mock_pull @@ -148,7 +155,6 @@ def editor_group_mock_pull(editor_group_id): EditorGroup.pull = eg_pull - ############################################## ## ApplicationManedReadyNotify if "application_maned_ready_notify" in NOTIFICATIONS: @@ -157,6 +163,7 @@ def editor_group_mock_pull(key, value): "maned": USER }) + eg_pull = EditorGroup.pull_by_key EditorGroup.pull_by_key = editor_group_mock_pull @@ -200,7 +207,6 @@ def editor_group_mock_pull(key, value): con = application_publisher_assigned_notify.ApplicationPublisherAssignedNotify() con.consume(event) - ############################################## ## ApplicationPublisherCreatedNotify if "application_publisher_created_notify" in NOTIFICATIONS: @@ -295,6 +301,7 @@ def editor_group_mock_pull(key, value): "editor": USER }) + eg_pull = EditorGroup.pull_by_key EditorGroup.pull_by_key = editor_group_mock_pull @@ -351,4 +358,14 @@ def editor_group_mock_pull(key, value): "application": application }) con = update_request_publisher_rejected_notify.UpdateRequestPublisherRejectedNotify() - con.consume(event) \ No newline at end of file + con.consume(event) + +if UpdateRequestPublisherSubmittedNotify.ID in NOTIFICATIONS: + application = ApplicationFixtureFactory.make_application_source() + application["admin"]["owner"] = USER + application["bibjson"]["title"] = "Update Request Publisher Submitted Notify" + application["id"] = "update_request_publisher_submitted_notify" + event = Event(constants.EVENT_APPLICATION_STATUS, USER, context={ + 'application': application, + }) + UpdateRequestPublisherSubmittedNotify.consume(event) diff --git a/doajtest/functional/rapid_ur_resubmission.py b/doajtest/functional/rapid_ur_resubmission.py new file mode 100644 index 0000000000..9c4aa0f1aa --- /dev/null +++ b/doajtest/functional/rapid_ur_resubmission.py @@ -0,0 +1,46 @@ +""" +Use this script to submit an update request twice in rapid succession, to demonstrate the +concurrency prevention mechanism in action. 
+ +To use this script get an API_KEY and a JOURNAL_ID of a journal owned by that user, and place +them in the appropriate variables below +""" + +import requests, json +from doajtest.fixtures.v2.applications import ApplicationFixtureFactory + + +def rapid_resubmit(base_url, key, jid): + source = ApplicationFixtureFactory.incoming_application() + source["admin"]["current_journal"] = jid + application = json.dumps(source) + + # make rapid resubmission + resp1 = requests.post(f"{base_url}/api/applications?api_key={key}", data=application) + resp2 = requests.post(f"{base_url}/api/applications?api_key={key}", data=application) + + return {"resp1": resp1, "resp2": resp2} + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-u", "--url", help="Base URL") + parser.add_argument("-k", "--key", help="API key") + parser.add_argument("-j", "--journal", help="Journal ID") + args = parser.parse_args() + + print("Submitting duplicate responses in rapid succession...") + responses = rapid_resubmit(args.url, args.key, args.journal) + + print("# Initial request:") + print("-- Response code: ", responses["resp1"].status_code, " (Expected: 201)") + print("-- Response body:\n", responses["resp1"].text) + print("\n") + + print("# Second request:") + print("-- Response code: ", responses["resp2"].status_code, " (Expected: 400)") + print("-- Response body:\n", responses["resp2"].text) + + + diff --git a/doajtest/helpers.py b/doajtest/helpers.py index a8f33d52d5..1fcf47eba3 100644 --- a/doajtest/helpers.py +++ b/doajtest/helpers.py @@ -5,6 +5,7 @@ import os import shutil from contextlib import contextmanager +import time from glob import glob from unittest import TestCase @@ -145,7 +146,9 @@ def create_app_patch(cls): 'ENABLE_EMAIL': False, "FAKER_SEED": 1, "EVENT_SEND_FUNCTION": "portality.events.shortcircuit.send_event", - 'CMS_BUILD_ASSETS_ON_STARTUP': False + 'CMS_BUILD_ASSETS_ON_STARTUP': False, + "UR_CONCURRENCY_TIMEOUT": 0, + 'UPLOAD_ASYNC_DIR': paths.create_tmp_path(is_auto_mkdir=True).as_posix(), } @classmethod @@ -235,8 +238,12 @@ def fix_es_mapping(): :return: """ - models.Article(**ArticleFixtureFactory.make_article_source()).save() - models.Application(**ApplicationFixtureFactory.make_application_source()).save() + for m in [ + models.Article(**ArticleFixtureFactory.make_article_source()), + models.Application(**ApplicationFixtureFactory.make_application_source()), + ]: + m.save(blocking=True) + m.delete() models.Notification().save() @@ -335,7 +342,7 @@ def new_fn(*args, **kwargs): # setup new path org_config_val = DoajTestCase.app_test.config[dir_key] org_hist_dir = hist_class.SAVE_BASE_DIRECTORY - _new_path = paths.create_tmp_dir(is_auto_mkdir=True) + _new_path = paths.create_tmp_path(is_auto_mkdir=True) hist_class.SAVE_BASE_DIRECTORY = _new_path.as_posix() DoajTestCase.app_test.config[dir_key] = _new_path.as_posix() @@ -366,8 +373,8 @@ def setUp(self, cur_app): self.org_store_local_dir = cur_app.config["STORE_LOCAL_DIR"] self.org_store_tmp_dir = cur_app.config["STORE_TMP_DIR"] - self.new_store_local_dir = paths.create_tmp_dir(is_auto_mkdir=True) - self.new_store_tmp_dir = paths.create_tmp_dir(is_auto_mkdir=True) + self.new_store_local_dir = paths.create_tmp_path(is_auto_mkdir=True) + self.new_store_tmp_dir = paths.create_tmp_path(is_auto_mkdir=True) cur_app.config["STORE_IMPL"] = "portality.store.StoreLocal" cur_app.config["STORE_LOCAL_DIR"] = self.new_store_local_dir @@ -435,3 +442,26 @@ def _cond_fn(): return not any_pending_tasks() and 
len(query_data_tasks(timeout='3m')) == 0 return wait_until(_cond_fn, 10, 0.2) + + +def wait_unit(exit_cond_fn, timeout=10, check_interval=0.1, + timeout_msg="wait_unit: exit_cond_fn was not satisfied before timeout"): + start = time.time() + while (time.time() - start) < timeout: + if exit_cond_fn(): + return + time.sleep(check_interval) + raise TimeoutError(timeout_msg) + + +def save_all_block_last(model_list): + model_list = list(model_list) + if not model_list: + return model_list + + *model_list, last = model_list + for model in model_list: + model.save() + last.save(blocking=True) + + return model_list diff --git a/doajtest/mocks/events_Consumer.py b/doajtest/mocks/events_Consumer.py index 7d8928824c..ae9f1c817c 100644 --- a/doajtest/mocks/events_Consumer.py +++ b/doajtest/mocks/events_Consumer.py @@ -11,7 +11,7 @@ class MockConsumer(EventConsumer): CONSUMED = [] @classmethod - def consumes(cls, event): + def should_consume(cls, event): cls.CONSUMES.append(event) return cls.CONSUME_RESULT diff --git a/doajtest/mocks/mock_mail.py b/doajtest/mocks/mock_mail.py new file mode 100644 index 0000000000..765404bad3 --- /dev/null +++ b/doajtest/mocks/mock_mail.py @@ -0,0 +1,17 @@ +from flask_mail import Message + + +class MockMail: + def __init__(self, *args, **kwargs): + self.messages = [] + + def send(self, message: Message): + print('----------- Mock send ----------------') + print(f'{message.subject=}') + print(f'{message.date=}') + print(f'{message.sender=}') + print(f'{message.recipients=}') + print('message.body:') + print(message.body) + print('---------------------------') + self.messages.append(message) diff --git a/doajtest/seleniumtest/test_article_xml_upload.py b/doajtest/seleniumtest/test_article_xml_upload.py index e8fe1e1671..73be167c49 100644 --- a/doajtest/seleniumtest/test_article_xml_upload.py +++ b/doajtest/seleniumtest/test_article_xml_upload.py @@ -15,6 +15,7 @@ from doajtest.selenium_helpers import SeleniumTestCase from portality import models, dao from portality.constants import FileUploadStatus +from portality.ui.messages import Messages HISTORY_ROW_PROCESSING_FAILED = 'processing failed' XML_FORMAT_DOAJ = 'doaj' @@ -92,10 +93,10 @@ class ArticleXmlUploadDoajXmlFailSTC(ArticleXmlUploadCommonSTC): 'Unable to parse XML file'), # case "Upload a file containing 2 identical ISSNs" (article_doajxml.IDENTICAL_ISSNS, - '', 'failed The Print and Online ISSNs supplied are identical'), + '', Messages.EXCEPTION_IDENTICAL_PISSN_AND_EISSN), # case "Upload a file without ISSN" (article_doajxml.NO_ISSN, - '', 'Neither Print ISSN nor Online ISSN has been supplied'), + '', Messages.EXCEPTION_NO_ISSNS), ]) def test_upload_fail(self, file_path, err_msg, expected_note): """ cases about upload article failed with error message """ diff --git a/doajtest/testbook/administrative_search/editor_groups.yml b/doajtest/testbook/administrative_search/editor_groups.yml index 16f3ccffea..5cad7e334c 100644 --- a/doajtest/testbook/administrative_search/editor_groups.yml +++ b/doajtest/testbook/administrative_search/editor_groups.yml @@ -65,3 +65,19 @@ tests: - step: Click the black X button to clear all filters results: - The interface returns to the initial state +- title: Test Restrict editing group name + context: + role: admin + steps: + - step: Go to the DOAJ editor groups search at /admin/editor_groups + results: + - All available records are found, and the first page of results is shown + - step: Select a group and click on the 'Edit This Group' button + results: + - The group is displayed in edit mode + - step: Check if
the group name is editable + results: + - The name is not editable. + - step: Make any other changes and save them + results: + - The changes are saved successfully \ No newline at end of file diff --git a/doajtest/testbook/new_application_form/publishers_form.yml b/doajtest/testbook/new_application_form/publishers_form.yml index 515a527aa1..1367eff7ba 100644 --- a/doajtest/testbook/new_application_form/publishers_form.yml +++ b/doajtest/testbook/new_application_form/publishers_form.yml @@ -109,15 +109,18 @@ tests: - step: Click 'Remove' next to it results: - It disappears - - step: Enter the same value in 'Publisher's name' and 'Society's name' + - step: Enter the same value in 'Publisher's name' and 'Other organisation's name' - step: Click Next results: - - "You see the error message: The value of this field and the Publisher's Country field must be different." + - "You see the error message: The Publisher name and Other organisation name cannot be the same." - step: Repeat the previous step, checking if the validator is case insensitive (e.g. "Publisher's name" and "publisher's Name") results: - The error message is still displayed - - step: Repeat steps X to X for the Publisher's Name, Country, Society name, Country - - step: Delete the values from 'Society name' and 'Society country', click Next + - step: Repeat steps X to X for the Publisher's Name, Country, Other organisation's name, Country + - step: Delete the values from 'Other organisation's name', leave 'Other organisation's country' filled in + results: + - "You see the error message: You cannot provide a country for the other organisation question without providing the other organisation's name" + - step: Fill back in the 'Other organisation's country' field results: - You can submit the page without providing answers to optional questions.
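As context for the testset above, here is a minimal sketch of the cross-field validation it exercises. The function and field names are hypothetical illustrations, not the form code touched by this PR:

# A sketch of the rules the publishers_form testbook checks; names are illustrative only.
def validate_other_organisation(form: dict) -> list:
    errors = []
    publisher = (form.get("publisher_name") or "").strip()
    other_name = (form.get("other_organisation_name") or "").strip()
    other_country = (form.get("other_organisation_country") or "").strip()

    # the testbook checks that the comparison is case-insensitive
    if publisher and other_name and publisher.lower() == other_name.lower():
        errors.append("The Publisher name and Other organisation name cannot be the same.")

    # a country without an organisation name is rejected
    if other_country and not other_name:
        errors.append("You cannot provide a country for the other organisation "
                      "question without providing the other organisation's name")
    return errors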
diff --git a/doajtest/testbook/user_management/login_and_registration.yml b/doajtest/testbook/user_management/login_and_registration.yml new file mode 100644 index 0000000000..da6f025fbb --- /dev/null +++ b/doajtest/testbook/user_management/login_and_registration.yml @@ -0,0 +1,42 @@ +suite: User Management +testset: Login and Registration +tests: +- title: Ensure Case Insensitive login + context: + role: anonymous + steps: + - step: Ensure a user exists with email "test@test.com" and password "password123" + - step: Go to login page at /account/login + - step: Provide email "test@test.com" and password "password123" + results: + - user correctly logged in + - step: Log out + - step: Go to login page at /account/login + - step: Provide email "TEST@test.com" and password "password123" + results: + - user correctly logged in +- title: Ensure Case Sensitive Registration + context: + role: anonymous + steps: + - step: Ensure a user exists with email "test@test.com" and password "password123" + - step: Ensure a user with email "TestUser@test.com" does NOT exist + - step: Ensure you're logged out + - step: Go to registration page at /account/register + - step: Provide "Test User" as a Name and "test@test.com", check captcha and click "Register" button + results: + - The "That email is already in use" error is displayed + - step: Provide "Test User" as a Name and "TEST@test.com", check captcha and click "Register" button + results: + - The "That email is already in use" error is displayed + - step: Provide "Test User" as a Name and "TestUser@test.com", check captcha and click "Register" button + results: + - The User is registered and redirected to the homepage + - A prompt to verify email address is displayed at the top + - step: Verify the email, set the password to "password123" and the username to "TestUser" + - step: Log in with the new account + - step: Go to your account settings at /account/testuser + results: + - Email address is displayed as "TestUser@test.com" (confirm correct casing).
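The registration steps above rely on one behaviour: email addresses are matched case-insensitively, while the casing the user typed is preserved for display. A minimal sketch of that pattern, using an assumed in-memory store rather than DOAJ's account model:

# Illustrative only: a toy store showing case-insensitive matching with case-preserving storage.
accounts = {}  # canonical (lowercased) email -> account record

def register(email: str, name: str) -> dict:
    key = email.lower()
    if key in accounts:
        # "TEST@test.com" collides with the existing "test@test.com"
        raise ValueError("That email is already in use")
    # store the address exactly as typed, so the display casing survives
    accounts[key] = {"email": email, "name": name}
    return accounts[key]

def find_account(email: str):
    # login lookup: "TEST@test.com" finds the "test@test.com" account
    return accounts.get(email.lower())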
+ + diff --git a/doajtest/testdrive/publisher_with_journal.py b/doajtest/testdrive/publisher_with_journal.py new file mode 100644 index 0000000000..bd4173a822 --- /dev/null +++ b/doajtest/testdrive/publisher_with_journal.py @@ -0,0 +1,38 @@ +from portality import constants +from doajtest.testdrive.factory import TestDrive +from doajtest.fixtures.v2.journals import JournalFixtureFactory +from portality import models + + +class PublisherWithJournal(TestDrive): + def setup(self) -> dict: + un = self.create_random_str() + pw = self.create_random_str() + acc = models.Account.make_account(un + "@example.com", un, "Publisher " + un, [constants.ROLE_PUBLISHER, constants.ROLE_API]) + acc.set_password(pw) + acc.generate_api_key() + acc.save() + + source = JournalFixtureFactory.make_journal_source(in_doaj=True) + j = models.Journal(**source) + j.remove_current_application() + j.set_id(j.makeid()) + j.set_owner(acc.id) + j.bibjson().eissn = "1111-1111" + j.bibjson().pissn = "2222-2222" + j.save() + + return { + "account": { + "username": acc.id, + "password": pw, + "api_key": acc.api_key + }, + "journals": [j.id] + } + + def teardown(self, params) -> dict: + models.Account.remove_by_id(params["account"]["username"]) + for jid in params["journals"]: + models.Journal.remove_by_id(jid) + return {"status": "success"} \ No newline at end of file diff --git a/doajtest/unit/api_tests/test_api_bulk_article.py b/doajtest/unit/api_tests/test_api_bulk_article_class.py similarity index 92% rename from doajtest/unit/api_tests/test_api_bulk_article.py rename to doajtest/unit/api_tests/test_api_bulk_article_class.py index 7aca8f76b2..353966da47 100644 --- a/doajtest/unit/api_tests/test_api_bulk_article.py +++ b/doajtest/unit/api_tests/test_api_bulk_article_class.py @@ -1,13 +1,19 @@ -from doajtest.helpers import DoajTestCase, with_es -from portality.api.current import ArticlesBulkApi, Api401Error, Api400Error -from portality import models -from doajtest.fixtures import ArticleFixtureFactory, JournalFixtureFactory -from copy import deepcopy -from flask import url_for +""" +Test the bulk article API via the ArticlesBulkApi layer +""" import json import time +from pathlib import Path +from unittest.mock import patch + +from flask import url_for from portality.lib.thread_utils import wait_until +from doajtest.fixtures import ArticleFixtureFactory, JournalFixtureFactory +from doajtest.helpers import DoajTestCase, with_es, wait_unit +from portality import models +from portality.api.current import ArticlesBulkApi, Api401Error, Api400Error + class TestBulkArticle(DoajTestCase): @@ -20,29 +26,8 @@ def tearDown(self): @with_es(indices=[models.Article.__type__, models.Journal.__type__], warm_mappings=[models.Article.__type__]) def test_01_create_articles_success(self): - def find_dict_in_list(lst, key, value): - for i, dic in enumerate(lst): - if dic[key] == value: - return i - return -1 - - # set up all the bits we need - 10 articles - dataset = [] - for i in range(1, 11): - data = ArticleFixtureFactory.make_incoming_api_article() - # change the DOI and fulltext URLs to escape duplicate detection - # and try with multiple articles - doi_ix = find_dict_in_list(data['bibjson']['identifier'], 'type', 'doi') - if doi_ix == -1: - data['bibjson']['identifier'].append({"type": "doi"}) - data['bibjson']['identifier'][doi_ix]['id'] = '10.0000/SOME.IDENTIFIER.{0}'.format(i) - - fulltext_url_ix = find_dict_in_list(data['bibjson']['link'], 'type', 'fulltext') - if fulltext_url_ix == -1: - data['bibjson']['link'].append({"type": "fulltext"}) -
data['bibjson']['link'][fulltext_url_ix]['url'] = 'http://www.example.com/article_{0}'.format(i) - dataset.append(deepcopy(data)) + dataset = list(ArticleFixtureFactory.make_bulk_incoming_api_article(10)) # create an account that we'll do the create as account = models.Account() @@ -459,3 +444,34 @@ def test_09_article_unacceptable(self): # check that 400 is raised with self.assertRaises(Api400Error): ids = ArticlesBulkApi.create(dataset, account) + + + def test_create_async__success(self): + income_articles = list(ArticleFixtureFactory.make_bulk_incoming_api_article(10)) + self.assert_create_async(income_articles, 1, 1) + + def test_create_async__invalid_income_articles_format(self): + income_articles = [{'invalid_input': 1}] + self.assert_create_async(income_articles, 1, 1) + + def test_create_async__invalid_json_format(self): + income_articles = [{'invalid_input': set()}] + with self.assertRaises(TypeError): + ArticlesBulkApi.create_async(income_articles, models.Account()) + + def assert_create_async(self, income_articles, offset_articles, offset_files): + def _count_files(): + return len(list(Path(self.app_test.config.get("UPLOAD_ASYNC_DIR", "/tmp")).glob("*.json"))) + + n_org_articles = models.BulkArticles.count() + n_org_files = _count_files() + with patch_bgtask_submit() as mock_submit: + ArticlesBulkApi.create_async(income_articles, models.Account()) + mock_submit.assert_called_once() + + assert _count_files() == n_org_files + offset_files + wait_unit(lambda: models.BulkArticles.count() == n_org_articles + offset_articles, 5, 0.5) + + +def patch_bgtask_submit(): + return patch('portality.tasks.article_bulk_create.ArticleBulkCreateBackgroundTask.submit') diff --git a/doajtest/unit/api_tests/test_api_bulk_article_interface.py b/doajtest/unit/api_tests/test_api_bulk_article_interface.py new file mode 100644 index 0000000000..a87b761892 --- /dev/null +++ b/doajtest/unit/api_tests/test_api_bulk_article_interface.py @@ -0,0 +1,90 @@ +""" +test the bulk article API by web interface +""" +import json +import time + +from doajtest.fixtures import article_doajxml +from doajtest.fixtures.article_doajxml import DoajXmlArticleFixtureFactory +from doajtest.helpers import DoajTestCase +from portality import models +from portality.models import BulkArticles +from portality.util import url_for + + +def load_json_by_handle(handle) -> str: + articles = [article.data for article in article_doajxml.to_articles(handle)] + data_json_str = json.dumps(articles) + return data_json_str + + +def create_user_with_api_key(): + acc = models.Account.make_account(email="a1@example.com", roles=["user", "api"]) + acc.save(blocking=True) + return acc + + +class TestBulkArticlesCreate(DoajTestCase): + + def test_bulk_article_create__202(self): + acc = create_user_with_api_key() + handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() + with self.app_test.test_client() as t_client: + response = t_client.post(url_for('api_v4.bulk_article_create', api_key=acc.api_key), + data=load_json_by_handle(handle), + ) + assert response.status_code == 202 + + def test_bulk_article_create__invalid_input(self): + acc = create_user_with_api_key() + with self.app_test.test_client() as t_client: + response = t_client.post(url_for('api_v4.bulk_article_create', api_key=acc.api_key), + data='{invalid json forma]', + ) + assert response.status_code == 400 + + +class TestBulkArticlesStatus(DoajTestCase): + + def test_bulk_article_create_status__processed(self): + acc = create_user_with_api_key() + task = BulkArticles() +
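Taken together, these tests describe the intended client round trip: POST the article list, receive 202 Accepted while a background task does the work, then poll the status endpoint until it reports processed. A sketch of that flow against the Flask test client, assuming the api_v4 route names used in these tests; the "status", "results" and "imported" fields are asserted below, but the "upload_id" field in the 202 response body is an assumption not confirmed by this diff:

import json
import time

from portality.util import url_for

def bulk_create_and_wait(t_client, api_key, articles, timeout=30):
    # POST the article list; the endpoint answers 202 Accepted
    resp = t_client.post(url_for('api_v4.bulk_article_create', api_key=api_key),
                         data=json.dumps(articles))
    assert resp.status_code == 202
    upload_id = json.loads(resp.data)['upload_id']  # assumed field name

    # poll until the background task reports 'processed'
    for _ in range(timeout):
        status = json.loads(t_client.get(url_for('api_v4.bulk_article_create_status',
                                                 upload_id=upload_id,
                                                 api_key=api_key)).data)
        if status['status'] == 'processed':
            return status['results']  # e.g. results['imported'], as asserted below
        time.sleep(1)
    raise TimeoutError('bulk upload was not processed in time')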
task.incoming(acc.id) + task.processed(1, 2, 3) + task.save(blocking=True) + + time.sleep(1) + + with self.app_test.test_client() as t_client: + resp = t_client.get(url_for('api_v4.bulk_article_create_status', + upload_id=task.id, + api_key=acc.api_key)) + + resp_content = json.loads(resp.data.decode('utf-8')) + assert resp.status_code == 200 + assert resp_content['status'] == 'processed' + assert resp_content['results']['imported'] == task.imported + + def test_bulk_article_create_status__acc_id_mismatch(self): + acc = create_user_with_api_key() + task = BulkArticles() + task.incoming('askdjalskdjaslk') + task.processed(1, 2, 3) + task.save(blocking=True) + + time.sleep(1) + + with self.app_test.test_client() as t_client: + resp = t_client.get(url_for('api_v4.bulk_article_create_status', + upload_id=task.id, + api_key=acc.api_key)) + assert resp.status_code == 400 + + def test_bulk_article_create_status__upload_id_not_exist(self): + acc = create_user_with_api_key() + + with self.app_test.test_client() as t_client: + resp = t_client.get(url_for('api_v4.bulk_article_create_status', + upload_id='lkadjlaksdjlaksdjlask', + api_key=acc.api_key)) + assert resp.status_code == 400 diff --git a/doajtest/unit/api_tests/test_api_errors.py b/doajtest/unit/api_tests/test_api_errors.py index 2cf5d15627..65b726edfb 100644 --- a/doajtest/unit/api_tests/test_api_errors.py +++ b/doajtest/unit/api_tests/test_api_errors.py @@ -1,6 +1,7 @@ from doajtest.helpers import DoajTestCase, with_es from portality import models import time +import portality.app # noqa, setup routes class TestApiErrors(DoajTestCase): diff --git a/doajtest/unit/api_tests/test_openapi_schema.py b/doajtest/unit/api_tests/test_openapi_schema.py new file mode 100644 index 0000000000..06d6442573 --- /dev/null +++ b/doajtest/unit/api_tests/test_openapi_schema.py @@ -0,0 +1,15 @@ +import json + +import openapi_spec_validator + +from doajtest.helpers import DoajTestCase +from portality.util import url_for + + +class TestDoajOpenapiSchema(DoajTestCase): + + def test_validate(self): + with self.app_test.test_client() as t_client: + resp = t_client.get(url_for('api.api_spec')) + api_json = json.loads(resp.data) + openapi_spec_validator.validate_spec(api_json) diff --git a/doajtest/unit/event_consumers/test_account_created_email.py b/doajtest/unit/event_consumers/test_account_created_email.py index 0016d810be..d360eb673e 100644 --- a/doajtest/unit/event_consumers/test_account_created_email.py +++ b/doajtest/unit/event_consumers/test_account_created_email.py @@ -31,7 +31,7 @@ def tearDown(self): self.info_stream.truncate(0) self.app_test.logger.removeHandler(self.read_info) - def test_consumes(self): + def test_should_consume(self): source = AccountFixtureFactory.make_publisher_source() acc = models.Account(**source) acc.clear_password() @@ -39,13 +39,13 @@ def test_consumes(self): acc.set_reset_token(reset_token, 86400) event = models.Event(constants.EVENT_ACCOUNT_CREATED, context={"account" : acc.data}) - assert AccountCreatedEmail.consumes(event) + assert AccountCreatedEmail.should_consume(event) event = models.Event(constants.EVENT_ACCOUNT_CREATED) - assert not AccountCreatedEmail.consumes(event) + assert not AccountCreatedEmail.should_consume(event) event = models.Event("test:event", context={"application" : "2345"}) - assert not AccountCreatedEmail.consumes(event) + assert not AccountCreatedEmail.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git 
a/doajtest/unit/event_consumers/test_account_passwordreset_email.py b/doajtest/unit/event_consumers/test_account_passwordreset_email.py index 276e797b4c..43119416cd 100644 --- a/doajtest/unit/event_consumers/test_account_passwordreset_email.py +++ b/doajtest/unit/event_consumers/test_account_passwordreset_email.py @@ -31,7 +31,7 @@ def tearDown(self): self.info_stream.truncate(0) self.app_test.logger.removeHandler(self.read_info) - def test_consumes(self): + def test_should_consume(self): source = AccountFixtureFactory.make_publisher_source() acc = models.Account(**source) acc.clear_password() @@ -39,13 +39,13 @@ def test_consumes(self): acc.set_reset_token(reset_token, 86400) event = models.Event(constants.EVENT_ACCOUNT_PASSWORD_RESET, context={"account" : acc.data}) - assert AccountPasswordResetEmail.consumes(event) + assert AccountPasswordResetEmail.should_consume(event) event = models.Event(constants.EVENT_ACCOUNT_PASSWORD_RESET) - assert not AccountPasswordResetEmail.consumes(event) + assert not AccountPasswordResetEmail.should_consume(event) event = models.Event("test:event", context={"application" : "2345"}) - assert not AccountPasswordResetEmail.consumes(event) + assert not AccountPasswordResetEmail.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_assed_assigned_notify.py b/doajtest/unit/event_consumers/test_application_assed_assigned_notify.py index 4920539ae2..927f70b39e 100644 --- a/doajtest/unit/event_consumers/test_application_assed_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_application_assed_assigned_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestApplicationAssedAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context={"application" : {}}) - assert ApplicationAssedAssignedNotify.consumes(event) + assert ApplicationAssedAssignedNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}}) - assert not ApplicationAssedAssignedNotify.consumes(event) + assert not ApplicationAssedAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED) - assert not ApplicationAssedAssignedNotify.consumes(event) + assert not ApplicationAssedAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_assed_inprogress_notify.py b/doajtest/unit/event_consumers/test_application_assed_inprogress_notify.py index 6b62e36368..75127191da 100644 --- a/doajtest/unit/event_consumers/test_application_assed_inprogress_notify.py +++ b/doajtest/unit/event_consumers/test_application_assed_inprogress_notify.py @@ -14,20 +14,20 @@ def setUp(self): def tearDown(self): super(TestApplicationAssedInprogressNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : {}, "old_status" : "completed", "new_status": "in progress"}) - assert ApplicationAssedInprogressNotify.consumes(event) + assert ApplicationAssedInprogressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"old_status": "ready", "new_status": "ready"}) - assert not ApplicationAssedInprogressNotify.consumes(event) + assert not 
ApplicationAssedInprogressNotify.should_consume(event) event = models.Event("test:event", context={"application" : "2345"}) - assert not ApplicationAssedInprogressNotify.consumes(event) + assert not ApplicationAssedInprogressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationAssedInprogressNotify.consumes(event) + assert not ApplicationAssedInprogressNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_editor_completed_notify.py b/doajtest/unit/event_consumers/test_application_editor_completed_notify.py index 436b60559d..0748b4a9d4 100644 --- a/doajtest/unit/event_consumers/test_application_editor_completed_notify.py +++ b/doajtest/unit/event_consumers/test_application_editor_completed_notify.py @@ -14,19 +14,19 @@ def setUp(self): def tearDown(self): super(TestApplicationEditorCompletedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": ApplicationFixtureFactory.make_application_source(), "old_status": "in progress", "new_status": "completed"}) - assert ApplicationEditorCompletedNotify.consumes(event) + assert ApplicationEditorCompletedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": ApplicationFixtureFactory.make_application_source(), "old_status": "completed", "new_status": "completed"}) - assert not ApplicationEditorCompletedNotify.consumes(event) + assert not ApplicationEditorCompletedNotify.should_consume(event) event = models.Event("test:event", context={"application": ApplicationFixtureFactory.make_application_source()}) - assert not ApplicationEditorCompletedNotify.consumes(event) + assert not ApplicationEditorCompletedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationEditorCompletedNotify.consumes(event) + assert not ApplicationEditorCompletedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_editor_group_assigned_notify.py b/doajtest/unit/event_consumers/test_application_editor_group_assigned_notify.py index 17e0c74b0e..0381c529c4 100644 --- a/doajtest/unit/event_consumers/test_application_editor_group_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_application_editor_group_assigned_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestApplicationEditorGroupAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_EDITOR_GROUP_ASSIGNED, context={"application" : {}}) - assert ApplicationEditorGroupAssignedNotify.consumes(event) + assert ApplicationEditorGroupAssignedNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}}) - assert not ApplicationEditorGroupAssignedNotify.consumes(event) + assert not ApplicationEditorGroupAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_EDITOR_GROUP_ASSIGNED) - assert not ApplicationEditorGroupAssignedNotify.consumes(event) + assert not ApplicationEditorGroupAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_editor_inprogress_notify.py 
b/doajtest/unit/event_consumers/test_application_editor_inprogress_notify.py index 215d36538c..3f1ed58bcc 100644 --- a/doajtest/unit/event_consumers/test_application_editor_inprogress_notify.py +++ b/doajtest/unit/event_consumers/test_application_editor_inprogress_notify.py @@ -14,23 +14,23 @@ def setUp(self): def tearDown(self): super(TestApplicationEditorInProgressNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": ApplicationFixtureFactory.make_application_source(), "old_status": "ready", "new_status": "in progress"}) - assert ApplicationEditorInProgressNotify.consumes(event) + assert ApplicationEditorInProgressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": ApplicationFixtureFactory.make_application_source(), "old_status": "completed", "new_status": "in progress"}) - assert ApplicationEditorInProgressNotify.consumes(event) + assert ApplicationEditorInProgressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": ApplicationFixtureFactory.make_application_source(), "old_status": "in progress", "new_status": "in progress"}) - assert not ApplicationEditorInProgressNotify.consumes(event) + assert not ApplicationEditorInProgressNotify.should_consume(event) event = models.Event("test:event", context={"application": ApplicationFixtureFactory.make_application_source()}) - assert not ApplicationEditorInProgressNotify.consumes(event) + assert not ApplicationEditorInProgressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationEditorInProgressNotify.consumes(event) + assert not ApplicationEditorInProgressNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_maned_ready_notify.py b/doajtest/unit/event_consumers/test_application_maned_ready_notify.py index a11967aef0..bdb07df500 100644 --- a/doajtest/unit/event_consumers/test_application_maned_ready_notify.py +++ b/doajtest/unit/event_consumers/test_application_maned_ready_notify.py @@ -14,19 +14,19 @@ def setUp(self): def tearDown(self): super(TestApplicationManedReadyNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : {}, "old_status" : "in progress", "new_status": "ready"}) - assert ApplicationManedReadyNotify.consumes(event) + assert ApplicationManedReadyNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "ready", "new_status": "ready"}) - assert not ApplicationManedReadyNotify.consumes(event) + assert not ApplicationManedReadyNotify.should_consume(event) event = models.Event("test:event", context={"application" : "2345"}) - assert not ApplicationManedReadyNotify.consumes(event) + assert not ApplicationManedReadyNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationManedReadyNotify.consumes(event) + assert not ApplicationManedReadyNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_accepted_notify.py b/doajtest/unit/event_consumers/test_application_publisher_accepted_notify.py index 
0383f9a8a1..73f2754b5d 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_accepted_notify.py +++ b/doajtest/unit/event_consumers/test_application_publisher_accepted_notify.py @@ -14,22 +14,22 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherAcceptedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() source["admin"]["application_type"] = constants.APPLICATION_TYPE_NEW_APPLICATION event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : source, "old_status" : "in progress", "new_status": "accepted"}) - assert ApplicationPublisherAcceptedNotify.consumes(event) + assert ApplicationPublisherAcceptedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": source, "old_status": "ready", "new_status": "ready"}) - assert not ApplicationPublisherAcceptedNotify.consumes(event) + assert not ApplicationPublisherAcceptedNotify.should_consume(event) event = models.Event("test:event", context={"application" : source}) - assert not ApplicationPublisherAcceptedNotify.consumes(event) + assert not ApplicationPublisherAcceptedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationPublisherAcceptedNotify.consumes(event) + assert not ApplicationPublisherAcceptedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_assigned_notify.py b/doajtest/unit/event_consumers/test_application_publisher_assigned_notify.py index fcabc738c4..f64f96b507 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_application_publisher_assigned_notify.py @@ -14,22 +14,22 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() source["admin"]["application_type"] = constants.APPLICATION_TYPE_NEW_APPLICATION event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context={"application" : source, "old_editor": "", "new_editor": source["admin"]["editor"]}) - assert ApplicationPublisherAssignedNotify.consumes(event) + assert ApplicationPublisherAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context={"application": source, "old_editor": "editor"}) - assert not ApplicationPublisherAssignedNotify.consumes(event) + assert not ApplicationPublisherAssignedNotify.should_consume(event) event = models.Event("test:event", context={"application" : source}) - assert not ApplicationPublisherAssignedNotify.consumes(event) + assert not ApplicationPublisherAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED) - assert not ApplicationPublisherAssignedNotify.consumes(event) + assert not ApplicationPublisherAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_created_notify.py b/doajtest/unit/event_consumers/test_application_publisher_created_notify.py index 418dd2ade7..de3bb96a7c 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_created_notify.py +++ 
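Every consumer in this run of files gets the same treatment: the boolean gate is renamed from consumes(event) to should_consume(event), and the tests assert that it checks the event id plus the minimum context before consume() is allowed to run. The consumers themselves are not part of this diff; a representative sketch of the pattern the tests exercise, with a hypothetical consumer name and checks modelled on what the assertions require:

# Illustrative only: the real consumers live under portality.events.consumers
# and are not shown in this diff.
from portality import constants, models

class ExampleStatusNotify:

    @classmethod
    def should_consume(cls, event: models.Event) -> bool:
        # right event type, an application in context, and a genuine status change
        return (event.id == constants.EVENT_APPLICATION_STATUS
                and event.context.get("application") is not None
                and event.context.get("old_status") != event.context.get("new_status"))

    @classmethod
    def consume(cls, event: models.Event):
        ...  # build and save the notification for the relevant account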
b/doajtest/unit/event_consumers/test_application_publisher_created_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherCreatedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_CREATED, context={"application" : {}}) - assert ApplicationPublisherCreatedNotify.consumes(event) + assert ApplicationPublisherCreatedNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}}) - assert not ApplicationPublisherCreatedNotify.consumes(event) + assert not ApplicationPublisherCreatedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_CREATED) - assert not ApplicationPublisherCreatedNotify.consumes(event) + assert not ApplicationPublisherCreatedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py b/doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py index 46daf5a5ce..9218f16e5b 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py +++ b/doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py @@ -15,21 +15,21 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherInProgressNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "pending", "new_status": "in progress"}) - assert ApplicationPublisherInprogressNotify.consumes(event) + assert ApplicationPublisherInprogressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "in progress", "new_status": "in progress"}) - assert not ApplicationPublisherInprogressNotify.consumes(event) + assert not ApplicationPublisherInprogressNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}}) - assert not ApplicationPublisherInprogressNotify.consumes(event) + assert not ApplicationPublisherInprogressNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationPublisherInprogressNotify.consumes(event) + assert not ApplicationPublisherInprogressNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_quickreject_notify.py b/doajtest/unit/event_consumers/test_application_publisher_quickreject_notify.py index 6555e398c0..100eb2357e 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_quickreject_notify.py +++ b/doajtest/unit/event_consumers/test_application_publisher_quickreject_notify.py @@ -14,24 +14,24 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherQuickRejectNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : {}, "old_status" : "in progress", "new_status": "rejected", "process": constants.PROCESS__QUICK_REJECT}) - assert ApplicationPublisherQuickRejectNotify.consumes(event) + assert ApplicationPublisherQuickRejectNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "in 
progress", "new_status": "rejected"}) - assert not ApplicationPublisherQuickRejectNotify.consumes(event) + assert not ApplicationPublisherQuickRejectNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"old_status": "rejected", "new_status": "rejected"}) - assert not ApplicationPublisherQuickRejectNotify.consumes(event) + assert not ApplicationPublisherQuickRejectNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}, "old_status" : "in progress", "new_status": "rejected", "process": constants.PROCESS__QUICK_REJECT}) - assert not ApplicationPublisherQuickRejectNotify.consumes(event) + assert not ApplicationPublisherQuickRejectNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationPublisherQuickRejectNotify.consumes(event) + assert not ApplicationPublisherQuickRejectNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_application_publisher_revision_notify.py b/doajtest/unit/event_consumers/test_application_publisher_revision_notify.py index 7993c5d307..efdb5e3d44 100644 --- a/doajtest/unit/event_consumers/test_application_publisher_revision_notify.py +++ b/doajtest/unit/event_consumers/test_application_publisher_revision_notify.py @@ -15,21 +15,21 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherRevisionNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "in progress", "new_status": "revisions_required"}) - assert ApplicationPublisherRevisionNotify.consumes(event) + assert ApplicationPublisherRevisionNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": {}, "old_status": "revisions_required", "new_status": "revisions_required"}) - assert not ApplicationPublisherRevisionNotify.consumes(event) + assert not ApplicationPublisherRevisionNotify.should_consume(event) event = models.Event("test:event", context={"application" : {}}) - assert not ApplicationPublisherRevisionNotify.consumes(event) + assert not ApplicationPublisherRevisionNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not ApplicationPublisherRevisionNotify.consumes(event) + assert not ApplicationPublisherRevisionNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_bg_job_finished_notify.py b/doajtest/unit/event_consumers/test_bg_job_finished_notify.py index 30e34feeec..418cdf7c76 100644 --- a/doajtest/unit/event_consumers/test_bg_job_finished_notify.py +++ b/doajtest/unit/event_consumers/test_bg_job_finished_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestBGJobFinishedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.BACKGROUND_JOB_FINISHED, context={"job" : {}}) - assert BGJobFinishedNotify.consumes(event) + assert BGJobFinishedNotify.should_consume(event) event = models.Event("test:event", context={"job" : "2345"}) - assert not BGJobFinishedNotify.consumes(event) + assert not BGJobFinishedNotify.should_consume(event) event = models.Event(constants.BACKGROUND_JOB_FINISHED) - assert not 
BGJobFinishedNotify.consumes(event) + assert not BGJobFinishedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_journal_assed_assigned_notify.py b/doajtest/unit/event_consumers/test_journal_assed_assigned_notify.py index fc7831c1d0..2882e51ea7 100644 --- a/doajtest/unit/event_consumers/test_journal_assed_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_journal_assed_assigned_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestJournalAssedAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_JOURNAL_ASSED_ASSIGNED, context={"journal" : {}}) - assert JournalAssedAssignedNotify.consumes(event) + assert JournalAssedAssignedNotify.should_consume(event) event = models.Event("test:event", context={"journal" : {}}) - assert not JournalAssedAssignedNotify.consumes(event) + assert not JournalAssedAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_JOURNAL_ASSED_ASSIGNED) - assert not JournalAssedAssignedNotify.consumes(event) + assert not JournalAssedAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_journal_discontinuing_soon_notify.py b/doajtest/unit/event_consumers/test_journal_discontinuing_soon_notify.py index f4f70f2f78..9da78fd1c0 100644 --- a/doajtest/unit/event_consumers/test_journal_discontinuing_soon_notify.py +++ b/doajtest/unit/event_consumers/test_journal_discontinuing_soon_notify.py @@ -38,19 +38,19 @@ def tearDown(self): models.Application.pull = self.pull_application models.EditorGroup.pull_by_key = self.pull_by_key - def test_consumes(self): + def test_should_consume(self): event = models.Event("test:event", context={"data" : {"1234"}}) - assert not JournalDiscontinuingSoonNotify.consumes(event) + assert not JournalDiscontinuingSoonNotify.should_consume(event) event = models.Event("test:event", context={"data": {}}) - assert not JournalDiscontinuingSoonNotify.consumes(event) + assert not JournalDiscontinuingSoonNotify.should_consume(event) event = models.Event(constants.EVENT_JOURNAL_DISCONTINUING_SOON) - assert not JournalDiscontinuingSoonNotify.consumes(event) + assert not JournalDiscontinuingSoonNotify.should_consume(event) event = models.Event(constants.EVENT_JOURNAL_DISCONTINUING_SOON, context = {"journal": {"1234"}, "discontinue_date": "2002-22-02"}) - assert JournalDiscontinuingSoonNotify.consumes(event) + assert JournalDiscontinuingSoonNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_journal_editor_group_assigned_notify.py b/doajtest/unit/event_consumers/test_journal_editor_group_assigned_notify.py index b3948af0ea..ed0b46d775 100644 --- a/doajtest/unit/event_consumers/test_journal_editor_group_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_journal_editor_group_assigned_notify.py @@ -14,15 +14,15 @@ def setUp(self): def tearDown(self): super(TestJournalEditorGroupAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): event = models.Event(constants.EVENT_JOURNAL_EDITOR_GROUP_ASSIGNED, context={"journal" : {}}) - assert JournalEditorGroupAssignedNotify.consumes(event) + assert JournalEditorGroupAssignedNotify.should_consume(event) event = models.Event("test:event", context={"journal" : {}}) - assert 
not JournalEditorGroupAssignedNotify.consumes(event) + assert not JournalEditorGroupAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_JOURNAL_EDITOR_GROUP_ASSIGNED) - assert not JournalEditorGroupAssignedNotify.consumes(event) + assert not JournalEditorGroupAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_update_request_publisher_accepted_notify.py b/doajtest/unit/event_consumers/test_update_request_publisher_accepted_notify.py index 30f67bc75b..3fa6de3c84 100644 --- a/doajtest/unit/event_consumers/test_update_request_publisher_accepted_notify.py +++ b/doajtest/unit/event_consumers/test_update_request_publisher_accepted_notify.py @@ -14,21 +14,21 @@ def setUp(self): def tearDown(self): super(TestUpdateRequestPublisherAcceptedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : source, "old_status" : "in progress", "new_status": "accepted"}) - assert UpdateRequestPublisherAcceptedNotify.consumes(event) + assert UpdateRequestPublisherAcceptedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application": source, "old_status": "ready", "new_status": "ready"}) - assert not UpdateRequestPublisherAcceptedNotify.consumes(event) + assert not UpdateRequestPublisherAcceptedNotify.should_consume(event) event = models.Event("test:event", context={"application" : source}) - assert not UpdateRequestPublisherAcceptedNotify.consumes(event) + assert not UpdateRequestPublisherAcceptedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not UpdateRequestPublisherAcceptedNotify.consumes(event) + assert not UpdateRequestPublisherAcceptedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_update_request_publisher_assigned_notify.py b/doajtest/unit/event_consumers/test_update_request_publisher_assigned_notify.py index 48747a2974..9c293e94db 100644 --- a/doajtest/unit/event_consumers/test_update_request_publisher_assigned_notify.py +++ b/doajtest/unit/event_consumers/test_update_request_publisher_assigned_notify.py @@ -14,21 +14,21 @@ def setUp(self): def tearDown(self): super(TestApplicationPublisherAssignedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context={"application" : source, "old_editor": "", "new_editor" : source["admin"]["editor"]}) - assert UpdateRequestPublisherAssignedNotify.consumes(event) + assert UpdateRequestPublisherAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context={"application": source, "old_editor": "editor"}) - assert not UpdateRequestPublisherAssignedNotify.consumes(event) + assert not UpdateRequestPublisherAssignedNotify.should_consume(event) event = models.Event("test:event", context={"application" : source}) - assert not UpdateRequestPublisherAssignedNotify.consumes(event) + assert not UpdateRequestPublisherAssignedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED) - assert not UpdateRequestPublisherAssignedNotify.consumes(event) + assert not 
UpdateRequestPublisherAssignedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_update_request_publisher_rejected_notify.py b/doajtest/unit/event_consumers/test_update_request_publisher_rejected_notify.py index 4095026f54..34be346ae1 100644 --- a/doajtest/unit/event_consumers/test_update_request_publisher_rejected_notify.py +++ b/doajtest/unit/event_consumers/test_update_request_publisher_rejected_notify.py @@ -14,21 +14,21 @@ def setUp(self): def tearDown(self): super(TestUpdateRequestPublisherRejectedNotify, self).tearDown() - def test_consumes(self): + def test_should_consume(self): source = ApplicationFixtureFactory.make_application_source() event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"application" : source, "old_status" : "in progress", "new_status": "rejected"}) - assert UpdateRequestPublisherRejectedNotify.consumes(event) + assert UpdateRequestPublisherRejectedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS, context={"old_status": "rejected", "new_status": "rejected"}) - assert not UpdateRequestPublisherRejectedNotify.consumes(event) + assert not UpdateRequestPublisherRejectedNotify.should_consume(event) event = models.Event("test:event", context={"application" : "2345"}) - assert not UpdateRequestPublisherRejectedNotify.consumes(event) + assert not UpdateRequestPublisherRejectedNotify.should_consume(event) event = models.Event(constants.EVENT_APPLICATION_STATUS) - assert not UpdateRequestPublisherRejectedNotify.consumes(event) + assert not UpdateRequestPublisherRejectedNotify.should_consume(event) def test_consume_success(self): self._make_and_push_test_context("/") diff --git a/doajtest/unit/event_consumers/test_update_request_publisher_submitted_notify.py b/doajtest/unit/event_consumers/test_update_request_publisher_submitted_notify.py new file mode 100644 index 0000000000..f0f80887b5 --- /dev/null +++ b/doajtest/unit/event_consumers/test_update_request_publisher_submitted_notify.py @@ -0,0 +1,81 @@ +import time +from copy import deepcopy + +from doajtest.fixtures import ApplicationFixtureFactory +from doajtest.helpers import DoajTestCase +from portality import constants +from portality import models +from portality.events.consumers.update_request_publisher_submitted_notify import UpdateRequestPublisherSubmittedNotify + + +class TestUpdateRequestPublisherSubmittedNotify(DoajTestCase): + def setUp(self): + super(TestUpdateRequestPublisherSubmittedNotify, self).setUp() + + def tearDown(self): + super(TestUpdateRequestPublisherSubmittedNotify, self).tearDown() + + def test_should_consume(self): + # success + source = ApplicationFixtureFactory.make_application_source() + assert UpdateRequestPublisherSubmittedNotify.should_consume(models.Event( + constants.EVENT_APPLICATION_UR_SUBMITTED, + context={"application": source}, + )) + + # event id mismatch + assert not UpdateRequestPublisherSubmittedNotify.should_consume(models.Event( + 'akdjlaskdjalksjdlaskjdlaks', + context={"application": source}, + )) + + # no application in context + assert not UpdateRequestPublisherSubmittedNotify.should_consume(models.Event( + constants.EVENT_APPLICATION_UR_SUBMITTED, + context={}, + )) + + # application type mismatch + source_type_mismatch = deepcopy(source) + source_type_mismatch['admin']['application_type'] = constants.APPLICATION_TYPE_NEW_APPLICATION + assert not UpdateRequestPublisherSubmittedNotify.should_consume(models.Event( + 
constants.EVENT_APPLICATION_UR_SUBMITTED, + context={"application": source_type_mismatch}, + )) + + # no owner + source_no_owner = deepcopy(source) + source_no_owner['admin']['owner'] = None + assert not UpdateRequestPublisherSubmittedNotify.should_consume(models.Event( + constants.EVENT_APPLICATION_UR_SUBMITTED, + context={"application": source_no_owner}, + )) + + def test_consume_success(self): + acc = models.Account() + acc.set_id("publisher") + acc.set_email("test@example.com") + acc.save(blocking=True) + + source = ApplicationFixtureFactory.make_application_source() + context: UpdateRequestPublisherSubmittedNotify.Context = { + 'application': source, + } + + event = models.Event(constants.EVENT_APPLICATION_STATUS, + who=acc.id, + context=context, ) + UpdateRequestPublisherSubmittedNotify.consume(event) + + time.sleep(1) + models.Notification.refresh() + ns = models.Notification.all() + assert len(ns) == 1 + + n = ns[0] + assert n.who == "publisher" + assert n.created_by == UpdateRequestPublisherSubmittedNotify.ID + assert n.classification == constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE + assert source['bibjson']['title'] in n.long + assert n.short is not None + assert not n.is_seen() diff --git a/doajtest/unit/test_admin_editor_groups.py b/doajtest/unit/test_admin_editor_groups.py new file mode 100644 index 0000000000..b5fb728a27 --- /dev/null +++ b/doajtest/unit/test_admin_editor_groups.py @@ -0,0 +1,42 @@ +import time +from doajtest.helpers import DoajTestCase, login +from portality.models import Account, EditorGroup +from doajtest.fixtures import AccountFixtureFactory + + +class TestAdminEditorGroups(DoajTestCase): + + def setUp(self): + super(TestAdminEditorGroups, self).setUp() + admin_account = Account.make_account(email="admin@test.com", username="admin", name="Admin", roles=["admin"]) + admin_account.set_password('password123') + admin_account.save() + + asource = AccountFixtureFactory.make_editor_source() + self.editor = Account(**asource) + self.editor.save() + + def test_editor_group_creation_and_update(self): + with self.app_test.test_client() as t_client: + # Test creating an EditorGroup + + login(t_client, "admin", "password123") + data = {"name": "Test Group", "editor": "eddie"} + response = t_client.post('/admin/editor_group', data=data) + assert response.status_code == 302 + + # give some time for the new record to be indexed + time.sleep(1) + editor_group_id = EditorGroup.group_exists_by_name("Test Group") + self.assertIsNotNone(editor_group_id) + + # Test EditorGroup name is not editable (silent failure if supplied) + data = {"name": "New Test Group", "editor": "eddie"} + response = t_client.post('/admin/editor_group/' + editor_group_id, data=data) + assert response.status_code == 302 + + # give some time for the new record to be indexed + time.sleep(1) + updated_group = EditorGroup.pull(editor_group_id) + self.assertEqual(updated_group.name, "Test Group") + self.assertNotEqual(updated_group.name, "New Test Group") diff --git a/doajtest/unit/test_article_forms.py b/doajtest/unit/test_article_forms.py new file mode 100644 index 0000000000..378c54a31d --- /dev/null +++ b/doajtest/unit/test_article_forms.py @@ -0,0 +1,31 @@ +from doajtest.fixtures import AccountFixtureFactory, JournalFixtureFactory +from doajtest.helpers import DoajTestCase, save_all_block_last +from portality import models +from portality.forms import article_forms +from portality.forms.article_forms import ArticleFormFactory, PublisherMetadataForm + + +class 
TestArticleFormsFunction(DoajTestCase): + def test_choices_for_article_issns(self): + account = models.Account(**(AccountFixtureFactory.make_managing_editor_source())) + + journals = [models.Journal(**j) for j in JournalFixtureFactory.make_many_journal_sources()] + for j in journals: + j.set_owner(account.id) + j.set_in_doaj(True) + save_all_block_last(journals) + + pissns = article_forms.choices_for_article_issns(account, issn_type='pissn') + eissns = article_forms.choices_for_article_issns(account, issn_type='eissn') + issns = article_forms.choices_for_article_issns(account, issn_type='all') + + assert pissns != eissns + assert len(issns) + assert set(pissns) | set(eissns) == set(issns) + + def test_empty_article_form(self): + user = models.Account(**AccountFixtureFactory.make_publisher_source()) + form: PublisherMetadataForm = ArticleFormFactory.get_from_context(user=user, role="publisher") + assert form is not None + assert form.source is None + assert form.form_data is None diff --git a/doajtest/unit/test_articles_upload_helper.py b/doajtest/unit/test_articles_upload_helper.py new file mode 100644 index 0000000000..1aa602d8f4 --- /dev/null +++ b/doajtest/unit/test_articles_upload_helper.py @@ -0,0 +1,78 @@ +from doajtest.fixtures import article_doajxml +from doajtest.helpers import DoajTestCase +from doajtest.unit_tester import article_upload_tester +from portality import models +from portality.tasks.helpers import articles_upload_helper, background_helper + + +class TestArticlesUploadHelper(DoajTestCase): + + def setUp(self): + super().setUp() + self.fix_es_mapping() + + def test_submit_success(self): + article_upload_tester.test_submit_success(run_background_process_common) + + def test_fail_unmatched_issn(self): + article_upload_tester.test_fail_unmatched_issn(run_background_process_common) + + def test_doaj_fail_shared_issn(self): + article_upload_tester.test_fail_shared_issn(run_background_process_common) + + def test_fail_unowned_issn(self): + article_upload_tester.test_fail_unowned_issn(run_background_process_common) + + def test_journal_2_article_2_success(self): + article_upload_tester.test_journal_2_article_2_success(run_background_process_common) + + def test_journal_2_article_1_success(self): + article_upload_tester.test_journal_2_article_1_success(run_background_process_common) + + def test_journal_1_article_1_success(self): + article_upload_tester.test_journal_1_article_1_success(run_background_process_common) + + def test_journal_2_article_2_1_different_success(self): + article_upload_tester.test_journal_2_article_2_1_different_success(run_background_process_common) + + def test_2_journals_different_owners_both_issns_fail(self): + article_upload_tester.test_2_journals_different_owners_both_issns_fail( + run_background_process_common) + + def test_2_journals_different_owners_issn_each_fail(self): + article_upload_tester.test_2_journals_different_owners_issn_each_fail( + run_background_process_common) + + def test_2_journals_same_owner_issn_each_fail(self): + article_upload_tester.test_2_journals_same_owner_issn_each_fail(run_background_process_common) + + def test_2_journals_different_owners_different_issns_mixed_article_fail(self): + article_upload_tester.test_2_journals_different_owners_different_issns_mixed_article_fail( + run_background_process_common) + + def test_journal_1_article_1_superlong_noclip(self): + article_upload_tester.test_journal_1_article_1_superlong_noclip(run_background_process_common) + + def test_45_journal_1_article_1_superlong_clip(self): + 
article_upload_tester.test_journal_1_article_1_superlong_clip(run_background_process_common) + + def test_one_journal_one_article_2_issns_one_unknown(self): + article_upload_tester.test_one_journal_one_article_2_issns_one_unknown(run_background_process_common) + + def test_lcc_spelling_error(self): + article_upload_tester.test_lcc_spelling_error(run_background_process_common) + + def test_unknown_journal_issn(self): + article_upload_tester.test_unknown_journal_issn(run_background_process_common) + + +def run_background_process_common(acc_id, handle): + articles = article_doajxml.to_articles(handle) + base_articles_upload = models.BulkArticles(owner=acc_id) + articles_upload_helper.upload_process( + base_articles_upload, + background_helper.create_job(acc_id, '__action__'), + articles, + lambda _articles: _articles, + ) + return base_articles_upload diff --git a/doajtest/unit/test_background_task_status.py b/doajtest/unit/test_background_task_status.py index 0cccee94ed..05b163a356 100644 --- a/doajtest/unit/test_background_task_status.py +++ b/doajtest/unit/test_background_task_status.py @@ -147,8 +147,11 @@ def test_create_background_status__empty_errors_config(self): journal_csv_dict = status_dict['queues']['main_queue']['errors'].get(JournalCSVBackgroundTask.__action__, {}) - assert is_stable(status_dict['status']) - assert not journal_csv_dict + assert not is_stable(status_dict['status']) + assert journal_csv_dict + # unstable action should be on top of the list after sorting + first_key = next(iter(status_dict['queues']['main_queue']['errors'])) + assert not is_stable(status_dict['queues']['main_queue']['errors'][first_key]['status']) @apply_test_case_config(bg_monitor_errors_config__a) def test_create_background_status__error_in_period_found(self): diff --git a/doajtest/unit/test_concurrent_saves.py b/doajtest/unit/test_concurrent_saves.py new file mode 100644 index 0000000000..5a4c75c2ce --- /dev/null +++ b/doajtest/unit/test_concurrent_saves.py @@ -0,0 +1,49 @@ +from doajtest.helpers import DoajTestCase, patch_config +from doajtest.fixtures import JournalFixtureFactory +from portality.models import Journal +from portality.bll.exceptions import ConcurrentUpdateRequestException +from portality.bll import DOAJ +from portality.core import app +import time + + +class TestConcurrentSaves(DoajTestCase): + def setUp(self): + super(TestConcurrentSaves, self).setUp() + # Re-enable concurrency check for this test + self.original_config = patch_config(self.app_test, {"UR_CONCURRENCY_TIMEOUT": 10}) + + def tearDown(self): + super(TestConcurrentSaves, self).tearDown() + patch_config(self.app_test, self.original_config) + + def test_01_update_request(self): + # we need a journal to create update requests for + source = JournalFixtureFactory.make_journal_source(in_doaj=True) + j = Journal(**source) + j.save(blocking=True) + + # create two update requests at the same time. These are our duplicates + appsvc = DOAJ.applicationService() + ur1, jl1, al1 = appsvc.update_request_for_journal(j.id) + ur2, jl2, al2 = appsvc.update_request_for_journal(j.id) + + # save the first update request, this should succeed + ur1.save() + + # immediately attempt to save the second update request. 
This should + # fail as there is already an UR in the pipeline + with self.assertRaises(ConcurrentUpdateRequestException): + ur2.save() + + # wait until the redis key times out, and then try making a 3rd UR + wait = app.config.get("UR_CONCURRENCY_TIMEOUT", 10) + 1 + time.sleep(wait) + ur3, jl3, al3 = appsvc.update_request_for_journal(j.id) + + # this third UR should be the same as the first one, as the index is now + # in a consistent state + assert ur3.id == ur1.id + + # saving it should work as normal + ur3.save() diff --git a/doajtest/unit/test_cookie_consent_own_domain.py b/doajtest/unit/test_cookie_consent_own_domain.py new file mode 100644 index 0000000000..d93458426c --- /dev/null +++ b/doajtest/unit/test_cookie_consent_own_domain.py @@ -0,0 +1,44 @@ +from doajtest.helpers import DoajTestCase +from urllib.parse import quote_plus, urlparse + + +class TestCookieConsent(DoajTestCase): + + def test_01_cookie_consent_permitted_domains(self): + """ Ensure we only redirect to our own domain via cookie consent """ + + with self.app_test.test_client() as t_client: + # Ensure only relative redirects are permitted + empty_redirect = t_client.get('/cookie_consent') + assert empty_redirect.status_code == 200 + + permitted_redirect = t_client.get('/cookie_consent?continue=%2Farticle%2Fuuid') + assert permitted_redirect.status_code == 302 + assert permitted_redirect.location == '/article/uuid' + + permitted_redirect_params = t_client.get('/cookie_consent?continue=' + quote_plus('/apply?errors=numerous')) + assert permitted_redirect_params.status_code == 302 + assert permitted_redirect_params.location == '/apply?errors=numerous' + + def test_02_cookie_consent_invalid_domains(self): + """ Any redirect to another domain is rejected via cookie consent """ + + with self.app_test.test_client() as t_client: + invalid_redirect = t_client.get( + '/cookie_consent?continue=https%3A%2F%2Fa_nasty_phishing_site.com%2Femailform%3Fdeeds%3Devil') + assert invalid_redirect.status_code == 400 + + # The best we can do - a redirect that looks like a path should try to serve from our domain, fail with 404 + invalid_redirect_no_scheme = t_client.get( + '/cookie_consent?continue=a_nasty_phishing_site.com%2Femailform%3Fdeeds%3Devil') + assert invalid_redirect_no_scheme.status_code == 302 + assert not invalid_redirect_no_scheme.location.startswith('http') + assert urlparse(invalid_redirect_no_scheme.location).path == 'a_nasty_phishing_site.com/emailform' + assert urlparse(invalid_redirect_no_scheme.location).netloc == '' + + invalid_redirect_ip = t_client.get( + '/cookie_consent?continue=1.2.3.4%2Femailform%3Fdeeds%3Devil') + assert invalid_redirect_ip.status_code == 302 + assert not invalid_redirect_ip.location.startswith('http') + assert urlparse(invalid_redirect_ip.location).path == '1.2.3.4/emailform' + assert urlparse(invalid_redirect_ip.location).netloc == '' diff --git a/doajtest/unit/test_duplicate_report_script.py b/doajtest/unit/test_duplicate_report_script.py index c4e78e0028..eb477f2646 100644 --- a/doajtest/unit/test_duplicate_report_script.py +++ b/doajtest/unit/test_duplicate_report_script.py @@ -22,7 +22,7 @@ class TestArticleMatch(DoajTestCase): def test_01_duplicates_report(self): """Check duplication reporting across all articles in the index""" - tmp_dir = paths.create_tmp_dir(is_auto_mkdir=True).as_posix() + tmp_dir = paths.create_tmp_path(is_auto_mkdir=True).as_posix() # Create 2 identical articles, a duplicate pair article1 = models.Article(**ArticleFixtureFactory.make_article_source( @@ -80,7 +80,7 
@@ def test_01_duplicates_report(self): def test_02_duplicates_global_criteria(self): """ Check we match only the actual duplicates, amongst other articles in the index. """ - tmp_dir = paths.create_tmp_dir(is_auto_mkdir=True).as_posix() + tmp_dir = paths.create_tmp_path(is_auto_mkdir=True).as_posix() dup_doi = '10.xxx/xxx/duplicate' dup_fulltext = 'http://fulltext.url/article/duplicate' diff --git a/doajtest/unit/test_models.py b/doajtest/unit/test_models.py index bf626153d7..af07a92859 100644 --- a/doajtest/unit/test_models.py +++ b/doajtest/unit/test_models.py @@ -3,7 +3,7 @@ from doajtest.fixtures import ApplicationFixtureFactory, JournalFixtureFactory, ArticleFixtureFactory, \ BibJSONFixtureFactory, ProvenanceFixtureFactory, BackgroundFixtureFactory, AccountFixtureFactory -from doajtest.helpers import DoajTestCase, patch_history_dir +from doajtest.helpers import DoajTestCase, patch_history_dir, save_all_block_last from portality import constants from portality import models from portality.constants import BgjobOutcomeStatus @@ -945,6 +945,33 @@ def test_15_continuations(self): assert future[0].bibjson().get_one_identifier(bj.E_ISSN) == "2222-2222" assert future[1].bibjson().get_one_identifier(bj.E_ISSN) == "3333-3333" + + def test_journal__recursive_future_continuations(self): + journal_a, journal_b = [models.Journal(**j) for j in + JournalFixtureFactory.make_many_journal_sources(count=2, in_doaj=True)] + + journal_a.bibjson().is_replaced_by = journal_b.bibjson().issns()[0] + journal_b.bibjson().is_replaced_by = journal_a.bibjson().issns()[0] + + save_all_block_last([journal_a, journal_b]) + + assert {j.id for j in journal_a.get_future_continuations()} == {journal_b.id} + + + def test_journal__recursive_pass_continuations(self): + journal_a, journal_b, journal_c = [ + models.Journal(**j) + for j in JournalFixtureFactory.make_many_journal_sources(count=3, in_doaj=True)] + + journal_a.bibjson().replaces = journal_b.bibjson().issns()[0] + journal_b.bibjson().replaces = journal_c.bibjson().issns()[0] + journal_c.bibjson().replaces = journal_a.bibjson().issns()[0] + + save_all_block_last([journal_a, journal_b, journal_c]) + + assert {j.id for j in journal_b.get_past_continuations()} == {journal_a.id, journal_c.id} + + def test_16_article_bibjson(self): source = BibJSONFixtureFactory.article_bibjson() bj = models.ArticleBibJSON(source) @@ -1204,7 +1231,7 @@ def test_24_save_valid_seamless_or_dataobj(self): s.data["junk"] = "in here" with self.assertRaises(seamless.SeamlessException): s.save() - assert s.id is None + assert s.id is not None # ID is necessary for duplication check p = models.Provenance() p.type = "suggestion" diff --git a/doajtest/unit/test_oaipmh.py b/doajtest/unit/test_oaipmh.py index bab8102499..0a6540eece 100644 --- a/doajtest/unit/test_oaipmh.py +++ b/doajtest/unit/test_oaipmh.py @@ -1,16 +1,20 @@ -from doajtest.helpers import DoajTestCase -from doajtest.fixtures import JournalFixtureFactory -from doajtest.fixtures import ArticleFixtureFactory -from portality import models -from portality.app import app -from lxml import etree +import base64 +import time from datetime import timedelta -from freezegun import freeze_time + +import pytest from flask import url_for -import time +from freezegun import freeze_time +from lxml import etree +from doajtest.fixtures import ArticleFixtureFactory +from doajtest.fixtures import JournalFixtureFactory +from doajtest.helpers import DoajTestCase +from portality import models +from portality.app import app from portality.lib import dates 
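The rewritten test_oaipmh.py imports below also pull in decode_resumption_token and ResumptionTokenException, and the new TestOaipmhFunction cases show the token format: urlsafe base64 over a JSON object whose abbreviated keys are expanded on decode ("m" becomes metadata_prefix). A sketch of the round trip those tests rely on; only that one key mapping is confirmed by this diff, and any other abbreviated keys are an assumption:

# Sketch of the resumption-token round trip exercised by the tests below.
import base64
import json

def encode_resumption_token(metadata_prefix) -> str:
    payload = json.dumps({"m": metadata_prefix}).encode("utf-8")
    return base64.urlsafe_b64encode(payload).decode("utf-8")

# decode_resumption_token(encode_resumption_token(1)) == {"metadata_prefix": 1}
# and a token that is not valid base64/JSON raises ResumptionTokenException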
from portality.lib.dates import FMT_DATE_STD +from portality.view.oaipmh import ResumptionTokenException, decode_resumption_token class TestClient(DoajTestCase): @@ -446,3 +450,15 @@ def test_11_oai_dc_attr(self): assert len(oai_dc) == 1 assert oai_dc[0].tag == "{%s}" % self.oai_ns["oai_dc"] + "dc" assert oai_dc[0].nsmap["xsi"] == self.oai_ns["xsi"] + + +class TestOaipmhFunction(DoajTestCase): + def test_decode_resumption_token__fail(self): + with pytest.raises(ResumptionTokenException): + decode_resumption_token('aaaaa@@@@@') + with pytest.raises(ResumptionTokenException): + decode_resumption_token(base64.urlsafe_b64encode(b'"m":1xxxxx').decode('utf-8')) + + def test_decode_resumption_token(self): + params = decode_resumption_token(base64.urlsafe_b64encode(b'{"m":1}').decode('utf-8')) + assert params == {"metadata_prefix": 1} \ No newline at end of file diff --git a/doajtest/unit/test_query.py b/doajtest/unit/test_query.py index d1717341c7..4b853c65d9 100644 --- a/doajtest/unit/test_query.py +++ b/doajtest/unit/test_query.py @@ -1,3 +1,6 @@ +import pytest +from elasticsearch import RequestError + from portality import models from doajtest.fixtures import AccountFixtureFactory, ArticleFixtureFactory, EditorGroupFixtureFactory, \ @@ -591,3 +594,14 @@ def test_journal_article_query_notes(self): {'query': 'application test','default_operator': 'AND'}}, 'size': 0, 'track_total_hits': True}, account=None, additional_parameters={"ref":"fqw"}) assert res['hits']['total']["value"] == 0, res['hits']['total']["value"] + + def test_search__invalid_from(self): + acc = models.Account(**AccountFixtureFactory.make_managing_editor_source()) + acc.save(blocking=True) + query = {'query': {'bool': {'must': [{'term': {'es_type.exact': 'journal'}}], + 'filter': [{'term': {'admin.in_doaj': True}}]}}, + 'size': '10', 'from': '@@PQF0l', + 'sort': [{'_score': {'order': 'desc'}}], + 'track_total_hits': 'true'} + with pytest.raises(RequestError): + QueryService().search('admin_query', 'journal', query, account=acc, additional_parameters={}) diff --git a/doajtest/unit/test_scripts_accounts_with_marketing_consent.py b/doajtest/unit/test_scripts_accounts_with_marketing_consent.py index cad2cb9870..fcd24db85c 100644 --- a/doajtest/unit/test_scripts_accounts_with_marketing_consent.py +++ b/doajtest/unit/test_scripts_accounts_with_marketing_consent.py @@ -15,7 +15,7 @@ class TestScriptsAccountsWithMarketingConsent(DoajTestCase): @with_es(indices=[models.Account.__type__], warm_mappings=[models.Account.__type__]) def test_01_publishers_with_consent(self): - tmp_dir = paths.create_tmp_dir(is_auto_mkdir=True) + tmp_dir = paths.create_tmp_path(is_auto_mkdir=True) num_new_records = 20 org_size = Account.count() diff --git a/doajtest/unit/test_task_datalog_journal_added_update.py b/doajtest/unit/test_task_datalog_journal_added_update.py index 46d5bff5c1..c887e8232a 100644 --- a/doajtest/unit/test_task_datalog_journal_added_update.py +++ b/doajtest/unit/test_task_datalog_journal_added_update.py @@ -22,19 +22,16 @@ DatalogJournalAdded(title='titlec', issn='1234-3000', date_added='2021-01-01', - has_seal=True, has_continuations=True, ), DatalogJournalAdded(title='titleb', issn='1234-2000', date_added='2021-01-01', - has_seal=True, has_continuations=True, ), DatalogJournalAdded(title='titlea', issn='1234-1000', date_added='2020-01-01', - has_seal=True, has_continuations=True, ), ] @@ -94,20 +91,18 @@ def test_find_new_xlsx_rows(self): ] def test_to_display_data(self): - assert ['titleg', '1234-7000', '01-January-2222', 'Seal', 
'Yes', ] == to_display_data(
+        assert ['titleg', '1234-7000', '01-January-2222', 'Yes', ] == to_display_data(
             DatalogJournalAdded(title='titleg',
                                 issn='1234-7000',
                                 date_added='2222-01-01',
-                                has_seal=True,
                                 has_continuations=True, ),
         )
-        assert ['titlexxx', '1234-9999', '02-January-2222', '', ''] == to_display_data(
+        assert ['titlexxx', '1234-9999', '02-January-2222', ''] == to_display_data(
             DatalogJournalAdded(title='titlexxx',
                                 issn='1234-9999',
                                 date_added='2222-01-02',
-                                has_seal=False,
                                 has_continuations=False, ),
         )
diff --git a/doajtest/unit/test_tasks_article_bulk_create.py b/doajtest/unit/test_tasks_article_bulk_create.py
new file mode 100644
index 0000000000..6310684372
--- /dev/null
+++ b/doajtest/unit/test_tasks_article_bulk_create.py
@@ -0,0 +1,40 @@
+from doajtest import helpers
+from doajtest.fixtures import article_doajxml
+from doajtest.helpers import DoajTestCase
+from doajtest.unit_tester import article_upload_tester
+from portality.models import BulkArticles
+from portality.tasks.article_bulk_create import ArticleBulkCreateBackgroundTask
+
+
+def run_background_process_simple(acc_id, handle):
+    articles = article_doajxml.to_articles(handle)
+    articles = [article.data for article in articles]
+    return run_background_process_by_incoming_articles(acc_id, articles)
+
+
+def run_background_process_by_incoming_articles(acc_id, articles):
+    job = ArticleBulkCreateBackgroundTask.prepare(acc_id, incoming_articles=articles)
+    task = ArticleBulkCreateBackgroundTask(job)
+    task.run()
+    bulk_articles = BulkArticles.pull(task.get_param(job.params, "upload_id"))
+    return bulk_articles
+
+
+class TestArticleBulkCreateBackgroundTask(DoajTestCase):
+    def setUp(self):
+        super().setUp()
+        self.fix_es_mapping()
+
+    def test_submit_success(self):
+        article_upload_tester.test_submit_success(run_background_process_simple)
+
+    def test_fail_shared_issn(self):
+        article_upload_tester.test_fail_shared_issn(run_background_process_simple)
+
+    def test_invalid_incoming_articles_format(self):
+        acc_id = "testowner"
+        helpers.save_all_block_last([
+            article_upload_tester.create_simple_publisher(acc_id),
+        ])
+        bulk_articles = run_background_process_by_incoming_articles(acc_id, [{"invalid": "format"}])
+        article_upload_tester.assert_failed(bulk_articles, expected_details=False)
diff --git a/doajtest/unit/test_tasks_ingestDOAJarticles.py b/doajtest/unit/test_tasks_ingestDOAJarticles.py
index a2eb5f2be9..4421d97ef9 100644
--- a/doajtest/unit/test_tasks_ingestDOAJarticles.py
+++ b/doajtest/unit/test_tasks_ingestDOAJarticles.py
@@ -1,27 +1,27 @@
-from doajtest.helpers import DoajTestCase
+import ftplib
+import os
+import time
+from urllib.parse import urlparse
+
+import requests
 from lxml import etree
+
+from doajtest import helpers
+from doajtest.fixtures.article import ArticleFixtureFactory
+from doajtest.fixtures.article_doajxml import DoajXmlArticleFixtureFactory
+from doajtest.helpers import DoajTestCase
 from doajtest.mocks.bll_article import BLLArticleMockFactory
-from doajtest.mocks.ftp import FTPMockFactory
 from doajtest.mocks.file import FileMockFactory
+from doajtest.mocks.ftp import FTPMockFactory
 from doajtest.mocks.response import ResponseMockFactory
 from doajtest.mocks.xwalk import XwalkMockFactory
-from portality.tasks import ingestarticles
-from doajtest.fixtures.article_doajxml import DoajXmlArticleFixtureFactory
-from doajtest.fixtures.accounts import AccountFixtureFactory
-from doajtest.fixtures.article import ArticleFixtureFactory
-import time
-from portality.crosswalks import article_doaj_xml
-from portality.bll.services import article as articleSvc - +from doajtest.unit_tester import article_upload_tester from portality import models -from portality.core import app - from portality.background import BackgroundException - -import ftplib, os, requests -from urllib.parse import urlparse - +from portality.bll.services import article as articleSvc +from portality.core import app +from portality.crosswalks import article_doaj_xml +from portality.tasks import ingestarticles from portality.ui.messages import Messages @@ -95,10 +95,27 @@ def tearDown(self): if os.path.exists(path): os.remove(path) - def mock_load_schema(self, doc): return self.schema + def run_background_process_simple(self, acc_id, handle): + f = FileMockFactory(stream=handle) + + job = ingestarticles.IngestArticlesBackgroundTask.prepare(acc_id, schema="doaj", upload_file=f) + id = job.params.get("ingest_articles__file_upload_id") + self.cleanup_ids.append(id) + + # because file upload gets created and saved by prepare + time.sleep(1) + + task = ingestarticles.IngestArticlesBackgroundTask(job) + task.run() + + # because file upload needs to be re-saved + time.sleep(1) + + return models.FileUpload.pull(id) + def test_01_doaj_file_upload_success(self): handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() @@ -118,7 +135,6 @@ def test_01_doaj_file_upload_success(self): assert len(previous) == 1 - def test_02_doaj_file_upload_invalid(self): handle = DoajXmlArticleFixtureFactory.invalid_schema_xml() @@ -134,10 +150,7 @@ def test_02_doaj_file_upload_invalid(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is not None and fu.error != "" - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=True) # file should have been removed from upload dir path = os.path.join(app.config.get("UPLOAD_DIR", "."), id + ".xml") @@ -164,11 +177,7 @@ def test_03_doaj_file_upload_fail(self): self.cleanup_ids.append(id) fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=False) # file should have been removed from disk path = os.path.join(app.config.get("UPLOAD_DIR", "."), id + ".xml") @@ -220,11 +229,7 @@ def test_05_doaj_url_upload_http_fail(self): id = previous[0].id fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=False) # now try again with an invalid url requests.head = ResponseMockFactory.head_success @@ -239,11 +244,7 @@ def test_05_doaj_url_upload_http_fail(self): id = previous[0].id fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=False) def test_06_doaj_url_upload_ftp_success(self): ftplib.FTP = FTPMockFactory.create("doaj") @@ -275,11 +276,7 @@ def test_07_url_upload_ftp_fail(self): id = previous[0].id fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert 
fu.error is not None and fu.error != "" - assert fu.error_details is None - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=False) def test_08_doajxml_prepare_file_upload_success(self): @@ -287,7 +284,8 @@ def test_08_doajxml_prepare_file_upload_success(self): f = FileMockFactory(stream=handle) previous = [] - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", upload_file=f, schema="doaj", previous=previous) + job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", upload_file=f, schema="doaj", + previous=previous) assert job is not None assert "ingest_articles__file_upload_id" in job.params @@ -308,7 +306,8 @@ def test_09_prepare_file_upload_fail(self): previous = [] with self.assertRaises(BackgroundException): - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", upload_file=f, schema="doaj", previous=previous) + job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", upload_file=f, schema="doaj", + previous=previous) assert len(previous) == 1 id = previous[0].id @@ -347,7 +346,8 @@ def test_11_prepare_url_upload_fail(self): previous = [] with self.assertRaises(BackgroundException): - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", url=url, schema="doaj", previous=previous) + job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", url=url, schema="doaj", + previous=previous) assert len(previous) == 1 id = previous[0].id @@ -369,7 +369,8 @@ def test_12_prepare_parameter_errors(self): # upload dir not configured del app.config["UPLOAD_DIR"] with self.assertRaises(BackgroundException): - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", url="http://whatever", schema="doaj", previous=[]) + job = ingestarticles.IngestArticlesBackgroundTask.prepare("testuser", url="http://whatever", schema="doaj", + previous=[]) def test_13_ftp_upload_success(self): ftplib.FTP = FTPMockFactory.create("doaj") @@ -381,7 +382,7 @@ def test_13_ftp_upload_success(self): path = os.path.join(upload_dir, file_upload.local_filename) self.cleanup_paths.append(path) - url= "ftp://upload" + url = "ftp://upload" parsed_url = urlparse(url) job = models.BackgroundJob() @@ -403,7 +404,7 @@ def test_14_ftp_upload_fail(self): path = os.path.join(upload_dir, file_upload.local_filename) self.cleanup_paths.append(path) - url= "ftp://fail" + url = "ftp://fail" parsed_url = urlparse(url) job = models.BackgroundJob() @@ -411,16 +412,13 @@ def test_14_ftp_upload_fail(self): result = ingestarticles.ftp_upload(job, path, parsed_url, file_upload) assert result is False - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is None - assert list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=False) def test_15_http_upload_success(self): requests.head = ResponseMockFactory.head_fail requests.get = ResponseMockFactory.doaj_get_success - url= "http://upload" + url = "http://upload" file_upload = models.FileUpload() file_upload.set_id() @@ -484,10 +482,7 @@ def test_18_download_http_invalid(self): task = ingestarticles.IngestArticlesBackgroundTask(job) result = task._download(file_upload) - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is not None and file_upload.error_details != "" - assert 
list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=True) def test_19_download_http_error(self): requests.head = ResponseMockFactory.head_fail @@ -510,10 +505,7 @@ def test_19_download_http_error(self): result = task._download(file_upload) assert result is False - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is None - assert list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=False) def test_20_download_ftp_valid(self): ftplib.FTP = FTPMockFactory.create("doaj") @@ -557,10 +549,7 @@ def test_21_download_ftp_invalid(self): task = ingestarticles.IngestArticlesBackgroundTask(job) result = task._download(file_upload) - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is not None and file_upload.error_details != "" - assert list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=True) def test_22_download_ftp_error(self): ftplib.FTP = FTPMockFactory.create("doaj") @@ -582,24 +571,13 @@ def test_22_download_ftp_error(self): result = task._download(file_upload) assert result is False - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is None - assert list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=False) def test_23_doaj_process_success(self): - - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal('testowner', pissn='1234-5678'), + article_upload_tester.create_simple_publisher("testowner"), + ]) job = models.BackgroundJob() @@ -621,16 +599,12 @@ def test_23_doaj_process_success(self): assert not os.path.exists(path) - assert file_upload.status == "processed" - assert file_upload.imported == 1 - assert file_upload.new == 1 + article_upload_tester.assert_processed(file_upload) def test_24_process_invalid_file(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal('testowner', pissn='1234-5678'), + ]) job = models.BackgroundJob() @@ -651,19 +625,14 @@ def test_24_process_invalid_file(self): task._process(file_upload) assert not os.path.exists(path) - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is not None and file_upload.error_details != "" - assert list(file_upload.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(file_upload, expected_details=True) def test_25_process_filesystem_error(self): articleSvc.ArticleService.batch_create_articles = BLLArticleMockFactory.batch_create - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save(blocking=True) + helpers.save_all_block_last([ + 
article_upload_tester.create_simple_journal('testowner', pissn='1234-5678'), + ]) job = models.BackgroundJob() @@ -684,67 +653,23 @@ def test_25_process_filesystem_error(self): task._process(file_upload) assert not os.path.exists(path) - assert file_upload.status == "failed" - assert file_upload.error is not None and file_upload.error != "" - assert file_upload.error_details is None - assert list(file_upload.failure_reasons.keys()) == [] - - def test_26_run_validated(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() - f = FileMockFactory(stream=handle) - - previous = [] - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", upload_file=f, schema="doaj", previous=previous) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" + article_upload_tester.assert_failed(file_upload, expected_details=False) def test_27_run_exists(self): requests.head = ResponseMockFactory.head_fail requests.get = ResponseMockFactory.doaj_get_success - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal('testowner', pissn='1234-5678'), + article_upload_tester.create_simple_publisher("testowner"), + ]) url = "http://valid" previous = [] - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", url=url, schema="doaj", previous=previous) + job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", url=url, schema="doaj", + previous=previous) id = job.params.get("ingest_articles__file_upload_id") self.cleanup_ids.append(id) @@ -773,617 +698,55 @@ def test_28_run_errors(self): with self.assertRaises(BackgroundException): task.run() - job.params = {"ingest_articles__file_upload_id" : "whatever"} + job.params = {"ingest_articles__file_upload_id": "whatever"} with self.assertRaises(BackgroundException): task.run() def test_29_submit_success(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() - f = FileMockFactory(stream=handle) - - previous = [] - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", upload_file=f, schema="doaj", previous=previous) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - # this assumes 
that huey is in always eager mode, and thus this immediately calls the async task, - # which in turn calls execute, which ultimately calls run - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" + article_upload_tester.test_submit_success(self.run_background_process_simple) def test_31_doaj_run_fail_unmatched_issn(self): - # Create a journal with 2 issns, one of which is the same as an issn on the - # article, but the article also contains an issn which doesn't match the journal - # We expect a failed ingest - - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - bj.add_identifier(bj.E_ISSN, "9876-5432") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_ambiguous() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed", "received status: {}".format(fu.status) - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - - fr = fu.failure_reasons - assert "unmatched" in fr - assert fr["unmatched"] == ["2345-6789"] + article_upload_tester.test_fail_unmatched_issn(self.run_background_process_simple) def test_32_run_doaj_fail_shared_issn(self): - # Create 2 journals with the same issns but different owners, which match the issns on the article - # We expect an ingest failure - - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - bj1.add_identifier(bj1.E_ISSN, "9876-5432") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner2") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.P_ISSN, "1234-5678") - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - - fr = fu.failure_reasons - assert "shared" in fr - assert "1234-5678" in fr["shared"] - assert "9876-5432" in fr["shared"] + article_upload_tester.test_fail_shared_issn(self.run_background_process_simple) def 
test_33_run_fail_unowned_issn(self): - # Create 2 journals with different owners and one different issn each. The two issns in the - # article match each of the journals respectively - # article match each of the journals respectively - # We expect an ingest failure - - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner2") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is None - - fr = fu.failure_reasons - assert "unowned" in fr - assert "9876-5432" in fr["unowned"] + article_upload_tester.test_fail_unowned_issn(self.run_background_process_simple) def test_34_doaj_journal_2_article_2_success(self): - # Create a journal with two issns both of which match the 2 issns in the article - # we expect a successful article ingest - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - bj.add_identifier(bj.E_ISSN, "9876-5432") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 - assert fu.updates == 0 - assert fu.new == 1 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 + article_upload_tester.test_journal_2_article_2_success(self.run_background_process_simple) def test_35_doaj_journal_2_article_1_success(self): - # Create a journal with 2 issns, one of which is present in the article as the - # only issn - # We expect a successful article ingest - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - bj.add_identifier(bj.E_ISSN, "9876-5432") - j.set_in_doaj(True) - j.save() - - 
asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 - assert fu.updates == 0 - assert fu.new == 1 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678"])] - assert len(found) == 1 + article_upload_tester.test_journal_2_article_1_success(self.run_background_process_simple) def test_37_doaj_journal_1_article_1_success(self): - # Create a journal with 1 issn, which is the same 1 issn on the article - # we expect a successful article ingest - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 - assert fu.updates == 0 - assert fu.new == 1 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678"])] - assert len(found) == 1 + article_upload_tester.test_journal_1_article_1_success(self.run_background_process_simple) def test_38_doaj_journal_2_article_2_1_different_success(self): - # Create a journal with 2 issns, one of which is the same as an issn on the - # article, but the article also contains an issn which doesn't match the journal - # We expect a failed ingest - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - bj.add_identifier(bj.E_ISSN, "9876-5432") - j.set_in_doaj(True) - j.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_ambiguous() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - 
# because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 1 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "2345-6789"])] - assert len(found) == 0 + article_upload_tester.test_journal_2_article_2_1_different_success(self.run_background_process_simple) def test_39_doaj_2_journals_different_owners_both_issns_fail(self): - # Create 2 journals with the same issns but different owners, which match the issns on the article - # We expect an ingest failure - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - bj1.add_identifier(bj1.E_ISSN, "9876-5432") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner2") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.P_ISSN, "1234-5678") - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 2 - assert "1234-5678" in fr["shared"] - assert "9876-5432" in fr["shared"] - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 0 + article_upload_tester.test_2_journals_different_owners_both_issns_fail( + self.run_background_process_simple) def test_40_doaj_2_journals_different_owners_issn_each_fail(self): - # Create 2 journals with different owners and one different issn each. 
The two issns in the - # article match each of the journals respectively - # We expect an ingest failure - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner2") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 1 - assert "9876-5432" in fr["unowned"] - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 0 + article_upload_tester.test_2_journals_different_owners_issn_each_fail( + self.run_background_process_simple) def test_41_doaj_2_journals_same_owner_issn_each_fail(self): - # Create 2 journals with the same owner, each with one different issn. 
The article's 2 issns - # match each of these issns - # We expect a failed article ingest - articles must match only ONE journal - j1 = models.Journal() - j1.set_owner("testowner") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 2 # error message for each article - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 0 + article_upload_tester.test_2_journals_same_owner_issn_each_fail(self.run_background_process_simple) def test_42_doaj_2_journals_different_owners_different_issns_mixed_article_fail(self): - # Create 2 different journals with different owners and different issns (2 each). 
- # The article's issns match one issn in each journal - # We expect an ingest failure - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - bj1.add_identifier(bj1.E_ISSN, "2345-6789") - j1.set_in_doaj(True) - j1.save() - - j2 = models.Journal() - j2.set_owner("testowner2") - j2.set_in_doaj(True) - bj2 = j2.bibjson() - bj2.add_identifier(bj2.P_ISSN, "8765-4321") - bj2.add_identifier(bj2.E_ISSN, "9876-5432") - j2.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 1 - assert "9876-5432" in fr["unowned"] - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 0 + article_upload_tester.test_2_journals_different_owners_different_issns_mixed_article_fail( + self.run_background_process_simple) def test_43_doaj_duplication(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - bj.add_identifier(bj.E_ISSN, "9876-5432") - j.set_in_doaj(True) - j.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal("testowner", pissn="1234-5678", eissn="9876-5432"), + article_upload_tester.create_simple_publisher("testowner") + ]) # make both handles, as we want as little gap as possible between requests in a moment handle1 = DoajXmlArticleFixtureFactory.upload_2_issns_correct() @@ -1419,261 +782,28 @@ def test_43_doaj_duplication(self): assert fu2.status == "processed", "received status: {}".format(fu2.status) # now let's check that only one article got created - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1, "found: {}".format(len(found)) + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 1 def test_44_doaj_journal_1_article_1_superlong_noclip(self): - # Create a journal with 1 issn, which is the same 1 issn on the article - # we expect a successful article ingest - # But it's just shy of 30000 unicode characters long! 
- j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_superlong_should_not_clip() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 - assert fu.updates == 0 - assert fu.new == 1 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678"])] - assert len(found) == 1 - assert len(found[0].bibjson().abstract) == 26264 + article_upload_tester.test_journal_1_article_1_superlong_noclip(self.run_background_process_simple) def test_doaj_45_journal_1_article_1_superlong_clip(self): - # Create a journal with 1 issn, which is the same 1 issn on the article - # we expect a successful article ingest - # But it's over 40k unicode characters long! - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_1_issn_superlong_should_clip() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "processed" - assert fu.imported == 1 - assert fu.updates == 0 - assert fu.new == 1 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678"])] - assert len(found) == 1 - assert len(found[0].bibjson().abstract) == 30000 + article_upload_tester.test_journal_1_article_1_superlong_clip(self.run_background_process_simple) def test_46_doaj_one_journal_one_article_2_issns_one_unknown(self): - # Create one journal and ingest one article. 
The Journal has two issns, and the article - # has two issns, but one of the journal's issns is unknown - # We expect an ingest failure - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - bj1.add_identifier(bj1.E_ISSN, "2222-2222") - j1.set_in_doaj(True) - j1.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 1 - assert "9876-5432" in fr["unmatched"] - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 0 + article_upload_tester.test_one_journal_one_article_2_issns_one_unknown(self.run_background_process_simple) def test_47_doaj_lcc_spelling_error(self): - # create a journal with a broken subject classification - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - bj1.add_identifier(bj1.E_ISSN, "9876-5432") - bj1.add_subject("LCC", "Whatever", "WHATEVA") - bj1.add_subject("LCC", "Aquaculture. Fisheries. Angling", "SH1-691") - j1.set_in_doaj(True) - j1.save() - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None, 'expected FileUpload is not None, received: {}'.format(fu) - assert fu.status == "processed", 'expected status processed, received: {}'.format(fu.status) - assert fu.imported == 1, 'expected 1 imported, received: {}'.format(fu.imported) - assert fu.updates == 0, 'expected 0 updates, received: {}'.format(fu.updates) - assert fu.new == 1, 'expected 1 new, received: {}'.format(fu.new) - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 0 - - found = [a for a in models.Article.find_by_issns(["1234-5678", "9876-5432"])] - assert len(found) == 1 - - cpaths = found[0].data["index"]["classification_paths"] - assert len(cpaths) == 1 - assert cpaths[0] == "Agriculture: Aquaculture. Fisheries. 
Angling" + article_upload_tester.test_lcc_spelling_error(self.run_background_process_simple) def test_48_doaj_unknown_journal_issn(self): - # create a journal with one of the ISSNs specified - j1 = models.Journal() - j1.set_owner("testowner1") - bj1 = j1.bibjson() - bj1.add_identifier(bj1.P_ISSN, "1234-5678") - j1.set_in_doaj(True) - j1.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner1") - account.save(blocking=True) - - # take an article with 2 issns, but one of which is not in the index - handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() - f = FileMockFactory(stream=handle) - - job = ingestarticles.IngestArticlesBackgroundTask.prepare("testowner1", schema="doaj", upload_file=f) - id = job.params.get("ingest_articles__file_upload_id") - self.cleanup_ids.append(id) - - # because file upload gets created and saved by prepare - time.sleep(1) - - task = ingestarticles.IngestArticlesBackgroundTask(job) - task.run() - - # because file upload needs to be re-saved - time.sleep(1) - - fu = models.FileUpload.pull(id) - assert fu is not None - assert fu.status == "failed" - assert fu.imported == 0 - assert fu.updates == 0 - assert fu.new == 0 - - fr = fu.failure_reasons - assert len(fr.get("shared", [])) == 0 - assert len(fr.get("unowned", [])) == 0 - assert len(fr.get("unmatched", [])) == 1 - + article_upload_tester.test_unknown_journal_issn(self.run_background_process_simple) def test_49_doaj_noids(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal("testowner", pissn="1234-5678"), + article_upload_tester.create_simple_publisher("testowner"), + ]) job = models.BackgroundJob() @@ -1820,10 +950,7 @@ def test_58_file_with_invalid_orcid_id(self): fu = models.FileUpload.pull(id) assert fu is not None - assert fu.status == "failed" - assert fu.error is not None and fu.error != "" - assert fu.error_details is not None and fu.error != "" - assert list(fu.failure_reasons.keys()) == [] + article_upload_tester.assert_failed(fu, expected_details=True) # file should have been removed from upload dir path = os.path.join(app.config.get("UPLOAD_DIR", "."), id + ".xml") @@ -1834,16 +961,10 @@ def test_58_file_with_invalid_orcid_id(self): assert os.path.exists(fad) def test_59_same_issns(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal("testowner", pissn="1234-5678"), + article_upload_tester.create_simple_publisher("testowner"), + ]) job = models.BackgroundJob() @@ -1866,20 +987,14 @@ def test_59_same_issns(self): assert not os.path.exists(path) assert file_upload.status == "failed", "expected: failed, received: {}".format(file_upload.status) - assert file_upload.error == Messages.EXCEPTION_IDENTICAL_PISSN_AND_EISSN, "Expected: '{}', received: {}".format(Messages.EXCEPTION_IDENTICAL_PISSN_AND_EISSN, file_upload.error) + assert file_upload.error == 
Messages.EXCEPTION_IDENTICAL_PISSN_AND_EISSN, "Expected: '{}', received: {}".format( + Messages.EXCEPTION_IDENTICAL_PISSN_AND_EISSN, file_upload.error) def test_60_doaj_no_issns(self): - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(True) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal("testowner", pissn="1234-5678"), + article_upload_tester.create_simple_publisher("testowner"), + ]) job = models.BackgroundJob() @@ -1906,17 +1021,10 @@ def test_60_doaj_no_issns(self): def test_61_journal_not_indoaj(self): """ You can't upload an article for a journal that's been withdrawn""" - j = models.Journal() - j.set_owner("testowner") - bj = j.bibjson() - bj.add_identifier(bj.P_ISSN, "1234-5678") - j.set_in_doaj(False) - j.save(blocking=True) - - asource = AccountFixtureFactory.make_publisher_source() - account = models.Account(**asource) - account.set_id("testowner") - account.save(blocking=True) + helpers.save_all_block_last([ + article_upload_tester.create_simple_journal("testowner", pissn="1234-5678", in_doaj=False), + article_upload_tester.create_simple_publisher("testowner"), + ]) job = models.BackgroundJob() diff --git a/doajtest/unit_tester/article_upload_tester.py b/doajtest/unit_tester/article_upload_tester.py new file mode 100644 index 0000000000..f273e50987 --- /dev/null +++ b/doajtest/unit_tester/article_upload_tester.py @@ -0,0 +1,363 @@ +from typing import Dict, List + +from doajtest import helpers +from doajtest.fixtures.accounts import AccountFixtureFactory +from doajtest.fixtures.article_doajxml import DoajXmlArticleFixtureFactory +from portality import models +from portality.models.uploads import BaseArticlesUpload + + +def assert_failed(fu: BaseArticlesUpload, + reason_cases: Dict[str, List] = None, + reason_size: Dict[str, int] = None, + expected_details=None): + assert fu is not None + assert fu.status == "failed" + assert fu.imported == 0 + assert fu.updates == 0 + assert fu.new == 0 + + assert fu.error is not None + assert fu.error != "" + + # assert error details + if expected_details is None: + pass + elif isinstance(expected_details, str): + assert fu.error_details == expected_details + elif expected_details: + assert fu.error_details is not None and fu.error_details != "" + elif not expected_details: + assert fu.error_details is None + + # assert failure reasons + fr = fu.failure_reasons + if not reason_cases and not reason_size: + assert list(fr.keys()) == [] + + reason_keys = ["shared", "unowned", "unmatched"] + reason_size = reason_size or {} + + if reason_cases: + reason_size = {k: len(reason_cases.get(k, [])) for k in reason_keys} + + # assert list match + for k, expected_cases in reason_cases.items(): + assert set(expected_cases) == set(fr.get(k, set())), f'list mismatch {k} ~ {expected_cases}' + + # assert reason size + for k, expected_size in reason_size.items(): + assert len(fr.get(k, [])) == expected_size, f'size mismatch {k} ~ {expected_size}' + + +def assert_processed(fu, target_issns=None, n_abstract=None): + assert fu is not None + assert fu.status == "processed" + assert fu.imported == 1 + assert fu.updates == 0 + assert fu.new == 1 + + fr = fu.failure_reasons + assert len(fr.get("shared", [])) == 0 + assert len(fr.get("unowned", [])) == 0 + assert 
len(fr.get("unmatched", [])) == 0 + + if target_issns is not None: + found = [a for a in models.Article.find_by_issns(target_issns)] + assert len(found) == 1 + if n_abstract is not None: + assert len(found[0].bibjson().abstract) == n_abstract + + +def create_simple_journal(owner, pissn=None, eissn=None, in_doaj=True, blocking=None): + j = models.Journal() + j.set_owner(owner) + bj1 = j.bibjson() + if pissn is not None: + bj1.add_identifier(bj1.P_ISSN, pissn) + if eissn is not None: + bj1.add_identifier(bj1.E_ISSN, eissn) + j.set_in_doaj(in_doaj) + if blocking is not None: + j.save(blocking=blocking) + return j + + +def create_simple_publisher(user_id, blocking=None): + asource = AccountFixtureFactory.make_publisher_source() + account = models.Account(**asource) + account.set_id(user_id) + if blocking is not None: + account.save(blocking=blocking) + return account + + +def test_lcc_spelling_error(run_background_process_fn): + # create a journal with a broken subject classification + j1 = models.Journal() + j1.set_owner("testowner1") + bj1 = j1.bibjson() + bj1.add_identifier(bj1.P_ISSN, "1234-5678") + bj1.add_identifier(bj1.E_ISSN, "9876-5432") + bj1.add_subject("LCC", "Whatever", "WHATEVA") + bj1.add_subject("LCC", "Aquaculture. Fisheries. Angling", "SH1-691") + j1.set_in_doaj(True) + j1.save() + + helpers.save_all_block_last([ + j1, + create_simple_publisher("testowner1"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner1", handle) + target_issns = ["1234-5678", "9876-5432"] + assert_processed(fu, target_issns=target_issns) + + found = [a for a in models.Article.find_by_issns(target_issns)] + cpaths = found[0].data["index"]["classification_paths"] + assert len(cpaths) == 1 + assert cpaths[0] == "Agriculture: Aquaculture. Fisheries. Angling" + + +def test_one_journal_one_article_2_issns_one_unknown(run_background_process_fn): + # Create one journal and ingest one article. The Journal has two issns, and the article + # has two issns, but one of the journal's issns is unknown + # We expect an ingest failure + helpers.save_all_block_last([ + create_simple_journal("testowner1", pissn="1234-5678", eissn="2222-2222"), + create_simple_publisher("testowner1"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner1", handle) + assert_failed(fu, reason_cases={"unmatched": ["9876-5432"]}) + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_unknown_journal_issn(run_background_process_fn): + # create a journal with one of the ISSNs specified + helpers.save_all_block_last([ + create_simple_journal("testowner1", pissn="1234-5678"), + create_simple_publisher("testowner1"), + ]) + + # take an article with 2 issns, but one of which is not in the index + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner1", handle) + assert_failed(fu, reason_size={"unmatched": 1}) + + +def test_2_journals_different_owners_different_issns_mixed_article_fail(run_background_process_fn): + # Create 2 different journals with different owners and different issns (2 each). 
+ # The article's issns match one issn in each journal + # We expect an ingest failure + + helpers.save_all_block_last([ + create_simple_journal("testowner1", pissn="1234-5678", eissn="2345-6789"), + create_simple_journal("testowner2", pissn="8765-4321", eissn="9876-5432"), + create_simple_publisher("testowner1"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner1", handle) + assert_failed(fu, reason_cases={'unowned': ['9876-5432']}) + + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_2_journals_same_owner_issn_each_fail(run_background_process_fn): + # Create 2 journals with the same owner, each with one different issn. The article's 2 issns + # match each of these issns + # We expect a failed article ingest + helpers.save_all_block_last([ + create_simple_journal("testowner", pissn="1234-5678"), + create_simple_journal("testowner", eissn="9876-5432"), + create_simple_publisher("testowner"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner", handle) + target_issns = ["1234-5678", "9876-5432"] + assert_failed(fu, reason_size={'unmatched': 2}) + + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_2_journals_different_owners_issn_each_fail(run_background_process_fn): + # Create 2 journals with different owners and one different issn each. The two issns in the + # article match each of the journals respectively + # We expect an ingest failure + + user_id = "testowner1" + helpers.save_all_block_last([ + create_simple_journal(user_id, pissn="1234-5678"), + create_simple_journal("testowner2", eissn="9876-5432"), + create_simple_publisher(user_id), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn(user_id, handle) + + assert_failed(fu, reason_cases={'unowned': ['9876-5432']}) + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_2_journals_different_owners_both_issns_fail(run_background_process_fn): + # Create 2 journals with the same issns but different owners, which match the issns on the article + # We expect an ingest failure + helpers.save_all_block_last([ + create_simple_journal("testowner1", pissn="1234-5678", eissn="9876-5432"), + create_simple_journal("testowner2", pissn="1234-5678", eissn="9876-5432"), + create_simple_publisher("testowner1"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + fu = run_background_process_fn("testowner1", handle) + target_issns = ["1234-5678", "9876-5432"] + assert_failed(fu, reason_cases={'shared': target_issns}) + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_journal_2_article_2_1_different_success(run_background_process_fn): + # Create a journal with 2 issns, one of which is the same as an issn on the + # article, but the article also contains an issn which doesn't match the journal + # We expect a failed ingest + helpers.save_all_block_last([ + create_simple_journal("testowner", pissn="1234-5678", eissn="9876-5432"), + create_simple_publisher("testowner"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_ambiguous() + fu = run_background_process_fn("testowner", handle) + assert_failed(fu, reason_size={"unmatched": 1}) + assert models.Article.count_by_issns(["1234-5678", "9876-5432"]) == 0 + + +def test_journal_1_article_1_success(run_background_process_fn): + # Create a journal with 1 issn, which 
is the same 1 issn on the article + # we expect a successful article ingest + helpers.save_all_block_last([ + create_simple_journal("testowner", pissn="1234-5678"), + create_simple_publisher("testowner"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() + fu = run_background_process_fn("testowner", handle) + assert_processed(fu, target_issns=["1234-5678"]) + + +def test_journal_1_article_1_superlong_clip(run_background_process_fn): + # Create a journal with 1 issn, which is the same 1 issn on the article + # we expect a successful article ingest + # But it's over 40k unicode characters long! + helpers.save_all_block_last([ + create_simple_journal("testowner", pissn="1234-5678"), + create_simple_publisher("testowner"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_1_issn_superlong_should_clip() + fu = run_background_process_fn("testowner", handle) + assert_processed(fu, target_issns=["1234-5678"], n_abstract=30000) + + +def test_journal_1_article_1_superlong_noclip(run_background_process_fn): + # Create a journal with 1 issn, which is the same 1 issn on the article + # we expect a successful article ingest + # But it's just shy of 30000 unicode characters long! + helpers.save_all_block_last([ + create_simple_journal("testowner", pissn="1234-5678"), + create_simple_publisher("testowner"), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_1_issn_superlong_should_not_clip() + fu = run_background_process_fn("testowner", handle) + assert_processed(fu, target_issns=["1234-5678"], n_abstract=26264) + + +def test_journal_2_article_2_success(run_background_process_fn): + # Create a journal with two issns both of which match the 2 issns in the article + # we expect a successful article ingest + acc_id = "testowner" + helpers.save_all_block_last([ + create_simple_journal(acc_id, pissn="1234-5678", eissn="9876-5432"), + create_simple_publisher(acc_id), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct() + + job = run_background_process_fn(acc_id, handle) + + assert_processed(job, target_issns=["1234-5678", "9876-5432"]) + + +def test_journal_2_article_1_success(run_background_process_fn): + # Create a journal with 2 issns, one of which is present in the article as the + # only issn + # We expect a successful article ingest + acc_id = "testowner" + helpers.save_all_block_last([ + create_simple_journal(acc_id, pissn="1234-5678", eissn="9876-5432"), + create_simple_publisher(acc_id), + ]) + + handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct() + job = run_background_process_fn(acc_id, handle) + assert_processed(job, target_issns=["1234-5678"]) + + +def test_fail_unowned_issn(run_background_process_fn): + # Create 2 journals with different owners and one different issn each. 
The two issns in the
+    # article match each of the journals respectively
+    # We expect an ingest failure
+    helpers.save_all_block_last([
+        create_simple_journal("testowner1", pissn="1234-5678"),
+        create_simple_journal("testowner2", eissn="9876-5432"),
+        create_simple_publisher("testowner"),
+    ])
+
+    handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct()
+    fu = run_background_process_fn("testowner", handle)
+    assert_failed(fu, expected_details=False,
+                  reason_cases={"unowned": ["9876-5432", '1234-5678']})
+
+
+def test_fail_shared_issn(run_background_process_fn):
+    # Create 2 journals with the same issns but different owners, which match the issns on the article
+    # We expect an ingest failure
+    helpers.save_all_block_last([
+        create_simple_journal("testowner1", pissn="1234-5678", eissn="9876-5432"),
+        create_simple_journal("testowner2", pissn="1234-5678", eissn="9876-5432"),
+        create_simple_publisher("testowner1"),
+    ])
+
+    handle = DoajXmlArticleFixtureFactory.upload_2_issns_correct()
+    fu = run_background_process_fn("testowner1", handle)
+    assert_failed(fu, expected_details=False, reason_cases={"shared": ["1234-5678", "9876-5432"]})
+
+
+def test_submit_success(run_background_process_fn):
+    helpers.save_all_block_last([
+        create_simple_journal('testowner', pissn='1234-5678'),
+        create_simple_publisher("testowner"),
+    ])
+
+    handle = DoajXmlArticleFixtureFactory.upload_1_issn_correct()
+    fu = run_background_process_fn("testowner", handle)
+    assert_processed(fu)
+
+
+def test_fail_unmatched_issn(run_background_process_fn):
+    # Create a journal with 2 issns, one of which is the same as an issn on the
+    # article, but the article also contains an issn which doesn't match the journal
+    # We expect a failed ingest
+
+    helpers.save_all_block_last([
+        create_simple_journal("testowner", pissn="1234-5678", eissn="9876-5432"),
+        create_simple_publisher("testowner"),
+    ])
+
+    handle = DoajXmlArticleFixtureFactory.upload_2_issns_ambiguous()
+    fu = run_background_process_fn("testowner", handle)
+    assert_failed(fu, reason_cases={"unmatched": ["2345-6789"]})
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 75aefe7aa1..b459e1ad5e 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -3,38 +3,12 @@ version: '2.1'
 services:
   doaj-redis:
     build: redis/
-    command: "redis-server /usr/local/etc/redis/redis.conf --appendonly yes --port ${HUEY_REDIS_PORT:-6379}"
+    command: "redis-server /usr/local/etc/redis/redis.conf --appendonly yes --port ${REDIS_PORT:-6379}"
     ports:
-      - "${REDIS_BIND_HOST:-127.0.0.1}:${HUEY_REDIS_PORT:-6379}:${HUEY_REDIS_PORT:-6379}"
+      - "${REDIS_BIND_HOST:-127.0.0.1}:${REDIS_PORT:-6379}:${REDIS_PORT:-6379}"
     volumes:
       - doaj_redis_data:/data
 
-  zookeeper:
-    image: 'bitnami/zookeeper:latest'
-    environment:
-      - ALLOW_ANONYMOUS_LOGIN=yes
-
-  kafka:
-    image: 'bitnami/kafka:latest'
-    ports:
-      - '9092:9092'
-    environment:
-      - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
-      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT,INTERNAL:PLAINTEXT
-      - KAFKA_CFG_LISTENERS=CLIENT://:9103,EXTERNAL://:9092,INTERNAL://:19092,
-      - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9103,EXTERNAL://localhost:9092,,INTERNAL://kafka:19092
-      - KAFKA_INTER_BROKER_LISTENER_NAME=CLIENT
-      - ALLOW_PLAINTEXT_LISTENER=yes
-      - KAFKA_AUTO_CREATE_TOPICS_ENABLE=true
-      - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
-    depends_on:
-      - zookeeper
-    healthcheck:
-      test: [ "CMD", "kafka-topics.sh", "--list", "--zookeeper", "zookeeper:2181" ]
"--zookeeper", "zookeeper:2181" ] - interval: 30s - timeout: 10s - retries: 10 - selenium-chrome: image: selenium/standalone-chrome:110.0 logging: diff --git a/docs/dev/how-to-implement.md b/docs/dev/how-to-implement.md index ef41b36f18..b76f849d10 100644 --- a/docs/dev/how-to-implement.md +++ b/docs/dev/how-to-implement.md @@ -26,8 +26,10 @@ def journal_bulk_delete(job_id): or, using a shortcut function -``` -journal_bulk_delete = huey_helper.create_common_execute_fn() +```python +@huey_helper.register_execute(is_load_config=False) +def journal_bulk_delete(job_id): + huey_helper.execute_common(job_id) ``` ### For Schedule job @@ -47,8 +49,7 @@ or, using a shortcut function ```python @huey_helper.register_schedule def scheduled_find_discontinued_soon(): - background_helper.submit_by_bg_task_type( - MonitorBgjobsBackgroundTask, + huey_helper.scheduled_common( to_address_list=app.config.get("TASKS_MONITOR_BGJOBS_TO", [get_system_email(), ]), from_address=app.config.get("TASKS_MONITOR_BGJOBS_FROM", get_system_email()), ) diff --git a/docs/dev/how-to-setup.md b/docs/dev/how-to-setup.md index 671336385f..9be9a665b1 100644 --- a/docs/dev/how-to-setup.md +++ b/docs/dev/how-to-setup.md @@ -40,3 +40,10 @@ DATALOG_JA_FILENAME = 'DOAJ: journals added and withdrawn' # worksheet name or tab name that datalog will write to DATALOG_JA_WORKSHEET_NAME = 'Added' ``` + + +How to upgrade swagger-ui +------------------------- +* run download_swagger_ui.py to download version of swagger-ui you want +* change url of js and css in api_docs.html +* ref: https://github.com/swagger-api/swagger-ui/blob/master/docs/usage/installation.md diff --git a/docs/dev/user-guide/user-guide.md b/docs/dev/user-guide/user-guide.md new file mode 100644 index 0000000000..143c09027b --- /dev/null +++ b/docs/dev/user-guide/user-guide.md @@ -0,0 +1,84 @@ +`manage_background_jobs` / `manage-bgjobs` commands example +========================================================== + + +Prepare data +------------ + + +* update dev.cfg to turn on all background job +``` +CRON_ALWAYS = {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "*"} + +# Disable the bigger huey tasks. 
diff --git a/docs/dev/user-guide/user-guide.md b/docs/dev/user-guide/user-guide.md
new file mode 100644
index 0000000000..143c09027b
--- /dev/null
+++ b/docs/dev/user-guide/user-guide.md
@@ -0,0 +1,84 @@
+`manage_background_jobs` / `manage-bgjobs` commands example
+==========================================================
+
+
+Prepare data
+------------
+
+
+* update dev.cfg to turn on all background jobs
+```
+CRON_ALWAYS = {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "*"}
+
+# Disable the bigger huey tasks. Crontabs must be for unique times to avoid delays due to perceived race conditions
+HUEY_SCHEDULE = {
+    "sitemap": CRON_ALWAYS,
+    "reporting": CRON_ALWAYS,
+    "journal_csv": CRON_ALWAYS,
+    "read_news": CRON_ALWAYS,
+    "article_cleanup_sync": CRON_ALWAYS,
+    "async_workflow_notifications": CRON_ALWAYS,
+    "request_es_backup": CRON_ALWAYS,
+    "check_latest_es_backup": CRON_ALWAYS,
+    "prune_es_backups": CRON_ALWAYS,
+    "public_data_dump": CRON_ALWAYS,
+    "harvest": CRON_ALWAYS,
+    "anon_export": CRON_ALWAYS,
+    "old_data_cleanup": CRON_ALWAYS,
+    "monitor_bgjobs": CRON_ALWAYS,
+    "find_discontinued_soon": CRON_ALWAYS,
+}
+```
+
+* run your `main` background job consumer
+```
+~/venv/doaj/bin/huey_consumer.py portality.tasks.consumer_main_queue.main_queue
+```
+
+* wait 10~30 minutes for some background jobs to be generated
+
+
+
+`report` Example
+----------------
+
+* run report to check progress
+```
+~/venv/doaj/bin/manage-bgjobs report
+```
+
+* simulate the DB and Redis getting out of sync, then try the cleanup command
+```
+redis-cli
+
+# remove the last 3 records from the main queue
+redis> LTRIM huey.redis.doajmainqueue 0 -4
+
+redis> exit
+
+# you will now find 3 records only in the DB
+~/venv/doaj/bin/manage-bgjobs report
+
+# run cleanup to remove the delta records
+~/venv/doaj/bin/manage-bgjobs clean
+```
+
+
+`rm-all` Example
+----------------
+* show current queued jobs
+```
+~/venv/doaj/bin/manage-bgjobs report
+```
+
+* run `rm-all`
+```
+~/venv/doaj/bin/manage-bgjobs rm-all
+```
+
+* check results
+```
+~/venv/doaj/bin/manage-bgjobs report
+```
\ No newline at end of file
diff --git a/docs/dictionary.md b/docs/dictionary.md
index 49ba798b8f..c77bedf643 100644
--- a/docs/dictionary.md
+++ b/docs/dictionary.md
@@ -7,4 +7,5 @@
 | fmt | format |
 | exparam | extra parameter |
 | maned | Managing Editor |
-| gsheet | Google Sheet |
\ No newline at end of file
+| gsheet | Google Sheet |
+| svc | service |
\ No newline at end of file
diff --git a/history/history.json b/history/history.json
index 7b3391ac39..ef284e37d7 100644
--- a/history/history.json
+++ b/history/history.json
@@ -1,17 +1,17 @@
 {
     "new" : [
         {
-            "tar" : "history.20210519.app1.tar.gz",
-            "source" : "web-2019"
+            "tar" : "history.20240905.app1.tar.gz",
+            "source" : "web-2021"
         },
         {
-            "tar" : "history.20210519.bg1.tar.gz",
-            "source" : "background-2019"
+            "tar" : "history.20240905.bg1.tar.gz",
+            "source" : "background-2022"
         }
     ],
     "existing" : [
-        "article_2021_background-2019.tar.gz",
-        "article_2021_web-2019.tar.gz",
-        "journal_2021_background-2019.tar.gz"
+        "article_2024_background-2022.tar.gz",
+        "article_2024_web-2021.tar.gz",
+        "journal_2024_background-2022.tar.gz"
     ]
 }
\ No newline at end of file
diff --git a/portality/api/common.py b/portality/api/common.py
index 5a5bbf9932..9930182a5b 100644
--- a/portality/api/common.py
+++ b/portality/api/common.py
@@ -53,7 +53,7 @@ class Api(object):
                             "contains bad data"}
 
     SWAG_API_KEY_REQ_PARAM = {
-        "description": "
Go to 'MY ACCOUNT' and 'Settings' to find your API key. If there is no key, click 'Generate a new API key'. If you do not see that button, contact us.", + "description": "
Go to 'DASHBOARD' and 'Settings' to find your API key. If there is no key, click 'Generate a new API key'. If you do not see that button, contact us.", "required": True, "type": "string", "name": "api_key", diff --git a/portality/api/current/bulk/applications.py b/portality/api/current/bulk/applications.py index 9665035bac..432c921951 100644 --- a/portality/api/current/bulk/applications.py +++ b/portality/api/current/bulk/applications.py @@ -12,7 +12,7 @@ class ApplicationsBulkApi(CrudApi): # ~~->API:Documentation~~ SWAG_TAG = 'Bulk API' SWAG_DELETE_PARAM = { - "description": "
List of DOAJ application IDs to be deleted. You must own all of the ids, and they must all not have entered the DOAJ workflow yet, or none of them will be processed.e.g. [4cf8b72139a749c88d043129f00e1b07, 8e896b60-35f1-4cd3-b3f9-07f7f29d8a98].
", + "description": "
List of DOAJ application IDs to be deleted. You must own all the IDs, and none of them should have entered the DOAJ workflow yet. Otherwise, processing will stop. e.g. [4cf8b72139a749c88d043129f00e1b07, 8e896b60-35f1-4cd3-b3f9-07f7f29d8a98].
", "required": True, "schema": {"type" : "string"}, "name": "application_ids", diff --git a/portality/api/current/bulk/articles.py b/portality/api/current/bulk/articles.py index d1a777ace7..e148a5e3aa 100644 --- a/portality/api/current/bulk/articles.py +++ b/portality/api/current/bulk/articles.py @@ -1,18 +1,20 @@ # ~~APIBulkArticles:Feature->APIBulk:Feature~~ +import warnings +from copy import deepcopy +from typing import List, Dict + +from portality import models from portality.api.common import Api, Api404Error, Api400Error, Api403Error, Api401Error from portality.api.current.crud import ArticlesCrudApi - from portality.bll import DOAJ from portality.bll import exceptions - -from copy import deepcopy - from portality.bll.exceptions import DuplicateArticleException +from portality.models import BulkArticles +from portality.tasks.article_bulk_create import ArticleBulkCreateBackgroundTask class ArticlesBulkApi(Api): - - #~~->Swagger:Feature~~ + # ~~->Swagger:Feature~~ # ~~->API:Documentation~~ SWAG_TAG = 'Bulk API' @@ -21,9 +23,9 @@ def create_swag(cls): template = deepcopy(cls.SWAG_TEMPLATE) template['parameters'].append( { - "description": "
A list/array of article JSON objects that you would like to create or update. The contents should be a list, and each object in the list should comply with the schema displayed in the GET (Retrieve) an article route. Partial updates are not allowed, you have to supply the full JSON.
", + "description": "
A list/array of article JSON objects that you would like to create or update. The contents should be a list, and each object in the list should comply with the schema displayed in the GET (Retrieve) an article route. Partial updates are not allowed; you have to supply the full JSON.
", "required": True, - "schema": {"type" : "string"}, + "schema": {"type": "string"}, "name": "article_json", "in": "body" } @@ -37,6 +39,7 @@ def create_swag(cls): @classmethod def create(cls, articles, account): + warnings.warn("This method is deprecated, use create_async instead", DeprecationWarning) # We run through the articles once, validating in dry-run mode # and deduplicating as we go. Then we .save() everything once # we know all incoming articles are valid. @@ -47,7 +50,7 @@ def create(cls, articles, account): raise Api401Error() # convert the data into a suitable article models - articles = [ArticlesCrudApi.prep_article(data, account) for data in articles] + articles = [ArticlesCrudApi.prep_article_for_api(data, account) for data in articles] # ~~->Article:Service~~ articleService = DOAJ.articleService() @@ -62,6 +65,146 @@ def create(cls, articles, account): except exceptions.ArticleNotAcceptable as e: raise Api400Error(str(e)) + @classmethod + def create_async_swag(cls): + template = deepcopy(cls.SWAG_TEMPLATE) + template['parameters'].append( + { + "description": "

A list/array of article JSON objects that you would like to create or update. The contents should be a list, and each object in the list should comply with the schema displayed in the GET (Retrieve) an article route. Partial updates are not allowed; you have to supply the full JSON.

This request is asynchronous; the response will contain an upload_id. You can use this id to query the task status.

", + "required": True, + "schema": {"type": "string"}, + "name": "article_json", + "in": "body" + } + ) + template['parameters'].append(cls.SWAG_API_KEY_REQ_PARAM) + + template['responses']['202'] = { + "schema": { + "properties": { + "msg": {"type": "string", }, + "upload_id": {"type": "string", + "description": "The upload id of the task, " + "User can use this ID to check the bulk upload status."}, + "status": {"type": "string", "description": "Link to the status URL for the task"} + }, + "type": "object" + }, + "description": "Resources are being created asynchronously; response contains the task IDs " + } + template['responses']['400'] = cls.R400 + return cls._build_swag_response(template) + + @classmethod + def create_async(cls, income_articles: List[Dict], account: models.Account): + job = ArticleBulkCreateBackgroundTask.prepare(account.id, incoming_articles=income_articles) + ArticleBulkCreateBackgroundTask.submit(job) + upload_id = next(v for k, v in job.params.items() if k.endswith('__upload_id')) + return upload_id + + @classmethod + def get_async_status_swag(cls): + template = deepcopy(cls.SWAG_TEMPLATE) + template['parameters'].append( + { + "description": "
+
+    @classmethod
+    def get_async_status_swag(cls):
+        template = deepcopy(cls.SWAG_TEMPLATE)
+        template['parameters'].append(
+            {
+                "description": "The upload id of the task. "
+                               "You can use this id to check the bulk upload status.",
", + "required": True, + "name": "upload_id", + "type": "string", + "in": "path", + } + ) + template['parameters'].append(cls.SWAG_API_KEY_REQ_PARAM) + template['responses']['200'] = { + "description": "Return status of upload ids", + "schema": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The status of the task", + }, + "status": { + "type": "string", + "description": "The status of the task", + "enum": ["pending", "validated", "failed", "processed", "processed_partial"] + }, + "results": { + 'type': 'object', + 'description': 'The result of the upload', + "properties": { + "imported": { + "type": "integer", + "description": "The number of articles imported", + }, + "failed": { + "type": "integer", + "description": "The number of articles failed to import", + }, + "update": { + "type": "integer", + "description": "The number of articles updated", + }, + "new": { + "type": "integer", + "description": "The number of articles created", + }, + }, + } + + }, + }, + } + template['responses']['400'] = { + "description": "Fail get status reason", + "schema": { + "type": "object", + "properties": { + "msg": {"type": "string", "description": "The error message"}, + }, + }, + } + return cls._build_swag_response(template) + + @classmethod + def get_async_status(cls, current_user_id, upload_id=None, ) -> Dict: + if not upload_id: + raise Api400Error("upload_id is required") + + bulk_article = BulkArticles.pull(upload_id) + if bulk_article is None or bulk_article.owner != current_user_id: + raise Api400Error("upload_id is invalid") + + internal_external_status_map = { + "incoming": "pending", + "partial": "processed_partial" + } + + status = { + "id": upload_id, + "created": bulk_article.created_date, + 'status': internal_external_status_map.get(bulk_article.status, bulk_article.status), + } + + if bulk_article.status in ["processed", "partial"]: + status['results'] = { + "imported": bulk_article.imported, + "failed": bulk_article.failed_imports, + "update": bulk_article.updates, + "new": bulk_article.new, + } + + if bulk_article.error: + status['error'] = bulk_article.error + + if bulk_article.error_details: + status['error_details'] = bulk_article.error_details + + if bulk_article.failure_reasons: + status['failure_reasons'] = bulk_article.failure_reasons + + return status @classmethod def delete_swag(cls): @@ -70,7 +213,7 @@ def delete_swag(cls): { "description": "
A list/array of DOAJ article IDs. E.g. [\"4cf8b72139a749c88d043129f00e1b07\", \"232b53726fb74cc4a8eb4717e5a43193\"].
", "required": True, - "schema": {"type" : "string"}, + "schema": {"type": "string"}, "name": "article_ids", "in": "body" } diff --git a/portality/api/current/crud/applications.py b/portality/api/current/crud/applications.py index 8bcb3533c7..064e099b5a 100644 --- a/portality/api/current/crud/applications.py +++ b/portality/api/current/crud/applications.py @@ -35,8 +35,8 @@ class ApplicationsCrudApi(CrudApi): "description": """
Application JSON that you would like to create or update. The contents should comply with the schema displayed in the GET (Retrieve) an application route. - Explicit documentation for the structure of this data is also provided here. - Partial updates are not allowed, you have to supply the full JSON.
""", + Explicit documentation for the structure of this data is available. + Partial updates are not allowed; you have to supply the full JSON.
""", "required": True, "schema": {"type" : "string"}, "name": "application_json", @@ -55,12 +55,12 @@ def create_swag(cls): @classmethod def create(cls, data, account, dry_run=False): - # as long as authentication (in the layer above) has been successful, and the account exists, then + # as long as authentication (in the layer above) has been successful and the account exists, then # we are good to proceed if account is None: raise Api401Error() - # first thing to do is a structural validation, by instantiating the data object + # first thing to do is a structural validation by instantiating the data object try: ia = IncomingApplication(data) # ~~-> APIIncomingApplication:Model~~ except seamless.SeamlessException as e: @@ -185,7 +185,7 @@ def retrieve_swag(cls): @classmethod def retrieve(cls, id, account): - # as long as authentication (in the layer above) has been successful, and the account exists, then + # as long as authentication (in the layer above) has been successful and the account exists, then # we are good to proceed if account is None: raise Api401Error() @@ -195,12 +195,12 @@ def retrieve(cls, id, account): if ap is None: raise Api404Error() - # is the current account the owner of the application - # if not we raise a 404 because that id does not exist for that user account. - if ap.owner != account.id: + # is the current account the owner of the application? + # if not we raise a 404 because that ID does not exist for that user account. + if not account.is_super and ap.owner != account.id: raise Api404Error() - # if we get to here we're going to give the user back the application + # if we get to here, we're going to give the user back the application oa = OutgoingApplication.from_model(ap) # ~~->APIOutgoingApplication:Model~~ return oa @@ -219,12 +219,12 @@ def update_swag(cls): @classmethod def update(cls, id, data, account): - # as long as authentication (in the layer above) has been successful, and the account exists, then + # as long as authentication (in the layer above) has been successful and the account exists, then # we are good to proceed if account is None: raise Api401Error() - # next thing to do is a structural validation of the replacement data, by instantiating the object + # next thing to do is a structural validation of the replacement data by instantiating the object try: ia = IncomingApplication(data) # ~~->APIIncomingApplication:Model~~ except seamless.SeamlessException as e: @@ -304,7 +304,7 @@ def update(cls, id, data, account): authService.can_edit_application(account, ap) except AuthoriseException as e: if e.reason == e.WRONG_STATUS: - raise Api403Error("The application is no longer in a state in which it can be edited via the API") + raise Api403Error("The application can no longer be edited via the API") else: raise Api404Error() @@ -337,7 +337,7 @@ def delete_swag(cls): @classmethod def delete(cls, id, account, dry_run=False): - # as long as authentication (in the layer above) has been successful, and the account exists, then + # as long as authentication (in the layer above) has been successful and the account exists, then # we are good to proceed if account is None: raise Api401Error() diff --git a/portality/api/current/crud/articles.py b/portality/api/current/crud/articles.py index fdcc4358dc..cb8e0d9e5a 100644 --- a/portality/api/current/crud/articles.py +++ b/portality/api/current/crud/articles.py @@ -1,10 +1,13 @@ # ~~APICrudArticles:Feature->APICrud:Feature~~ import json +from typing import Dict from portality.api.current.crud.common import 
CrudApi from portality.api.current import Api400Error, Api401Error, Api403Error, Api404Error, Api500Error from portality.api.current.data_objects.article import IncomingArticleDO, OutgoingArticleDO +from portality.bll import exceptions from portality.core import app +from portality.dao import ElasticSearchWriteException, DAOSaveExceptionMaxRetriesReached from portality.lib import dataobj from portality import models, app_email from portality.bll.doaj import DOAJ @@ -32,8 +35,8 @@ class ArticlesCrudApi(CrudApi): "description": """
Article JSON that you would like to create or update. The contents should comply with the schema displayed in the GET (Retrieve) an article route. - Explicit documentation for the structure of this data is also provided here. - Partial updates are not allowed, you have to supply the full JSON.
""", + Explicit documentation for the structure of this data is available. + Partial updates are not allowed; you have to supply the full JSON.
""", "required": True, "schema": {"type" : "string"}, "name": "article_json", @@ -52,7 +55,7 @@ def __handle_journal_info(cls, am): try: am.add_journal_metadata() # overwrite journal part of metadata and in_doaj setting except models.NoJournalException as e: - raise Api400Error("No journal found to attach article to. Each article in DOAJ must belong to a journal and the (E)ISSNs provided in the bibjson.identifiers section of this article record do not match any DOAJ journal.") + raise Api400Error("No journal found to attach the article to. The ISSN(s) provided in the bibjson.identifiers section of this article record do not match any DOAJ journal.") # restore the user's data am.bibjson().number = number @@ -79,15 +82,15 @@ def create(cls, data, account): raise Api401Error() # convert the data into a suitable article model (raises Api400Error if doesn't conform to struct) - am = cls.prep_article(data, account) + am = cls.prep_article_for_api(data, account) # ~~-> Article:Service~~ articleService = DOAJ.articleService() try: result = articleService.create_article(am, account, add_journal_info=True) - except ArticleMergeConflict as e: - raise Api400Error(str(e)) - except ArticleNotAcceptable as e: + except ( + ArticleMergeConflict, ArticleNotAcceptable, IngestException, + ) as e: raise Api400Error(str(e)) except DuplicateArticleException as e: raise Api403Error(str(e)) @@ -99,18 +102,28 @@ def create(cls, data, account): # Check we are allowed to create an article for this journal if result.get("fail", 0) == 1: - raise Api403Error("It is not possible to create an article for this journal. Have you included in the upload an ISSN which is not associated with any journal in your account? ISSNs must match exactly the ISSNs against the journal record.") + raise Api403Error("It is not possible to create an article for this journal. Does the upload include an ISSN that is not associated with any journal in your account? ISSNs must match exactly the ISSNs in the journal record.") return am + @classmethod + def prep_article_for_api(cls, data, account) -> models.Article: + try: + return cls.prep_article(data, account) + except ( + dataobj.DataStructureException, + dataobj.ScriptTagFoundException, + ) as e: + raise Api400Error(str(e)) + @classmethod - def prep_article(cls, data, account): + def prep_article(cls, data: Dict, account: models.Account) -> models.Article: # first thing to do is a structural validation, by instantiating the data object try: ia = IncomingArticleDO(data) except dataobj.DataStructureException as e: - raise Api400Error(str(e)) + raise e # let caller know there could have dataobj.DataStructureException except dataobj.ScriptTagFoundException as e: # ~~->Email:ExternalService~~ email_data = {"article": data, "account": account.__dict__} @@ -129,15 +142,15 @@ def prep_article(cls, data, account): data=jdata) except app_email.EmailException: app.logger.exception('Error sending script tag detection email - ' + jdata) - raise Api400Error(str(e)) + raise e # if that works, convert it to an Article object am = ia.to_article_model() # the user may have supplied metadata in the model for id and created_date - # and we want to can that data. If this is a truly new article its fine for - # us to assign a new id here, and if it's a duplicate, it will get attached - # to its duplicate id anyway. + # and we want to can that data. If this is a truly new article, it's fine for + # us to assign a new ID here, and if it's a duplicate, it will get attached + # to its duplicate ID anyway. 
am.set_id() am.set_created() @@ -204,7 +217,7 @@ def update_swag(cls): @classmethod def update(cls, id, data, account): - # as long as authentication (in the layer above) has been successful, and the account exists, then + # as long as authentication (in the layer above) has been successful and the account exists, then # we are good to proceed if account is None: raise Api401Error() diff --git a/portality/api/current/crud/journals.py b/portality/api/current/crud/journals.py index 1650672ad4..1e125959bd 100644 --- a/portality/api/current/crud/journals.py +++ b/portality/api/current/crud/journals.py @@ -29,8 +29,7 @@ def retrieve_swag(cls): template['responses']['200'] = cls.R200 template['responses']['200']['schema'] = OutgoingJournal().struct_to_swag(schema_title='Journal schema', struct=JOURNAL_STRUCT) - template['responses']['200']['description'] = 'Detailed documentation on the response format is available here ' + template['responses']['200']['description'] = 'Detailed documentation on the response format is available. ' template['responses']['404'] = cls.R404 return cls._build_swag_response(template, api_key_override=False) diff --git a/portality/api/current/data_objects/application.py b/portality/api/current/data_objects/application.py index a88e9ff32c..5186d2fd0d 100644 --- a/portality/api/current/data_objects/application.py +++ b/portality/api/current/data_objects/application.py @@ -178,7 +178,7 @@ def custom_validate(self): # check they are not the same if pissn is not None and eissn is not None: if pissn == eissn: - raise seamless.SeamlessException("P-ISSN and E-ISSN should be different") + raise seamless.SeamlessException("Print ISSN and Online ISSN should be different") # A link to the journal homepage is required # diff --git a/portality/api/current/data_objects/article.py b/portality/api/current/data_objects/article.py index 77bf6a7d7d..1c733e8051 100644 --- a/portality/api/current/data_objects/article.py +++ b/portality/api/current/data_objects/article.py @@ -161,7 +161,9 @@ class IncomingArticleDO(dataobj.DataObj, swagger.SwaggerSupport): def __init__(self, raw=None): self._add_struct(BASE_ARTICLE_STRUCT) self._add_struct(INCOMING_ARTICLE_REQUIRED) - super(IncomingArticleDO, self).__init__(raw, construct_silent_prune=True, expose_data=True, coerce_map=BASE_ARTICLE_COERCE, swagger_trans=BASE_ARTICLE_SWAGGER_TRANS) + super(IncomingArticleDO, self).__init__(raw, construct_silent_prune=True, expose_data=True, + coerce_map=BASE_ARTICLE_COERCE, + swagger_trans=BASE_ARTICLE_SWAGGER_TRANS) def _trim_empty_strings(self): @@ -208,7 +210,7 @@ def custom_validate(self): # check that there are identifiers at all identifiers = self.bibjson.identifier if identifiers is None or len(identifiers) == 0: - raise dataobj.DataStructureException("You must specify at least one of P-ISSN or E-ISSN in bibjson.identifier") + raise dataobj.DataStructureException("You must specify at least one Print ISSN or online ISSN in bibjson.identifier") # extract the p/e-issn identifier objects pissn = None @@ -221,7 +223,7 @@ def custom_validate(self): # check that at least one of them appears if pissn is None and eissn is None: - raise dataobj.DataStructureException("You must specify at least one of P-ISSN or E-ISSN in bibjson.identifier") + raise dataobj.DataStructureException("You must specify at least one Print ISSN or online ISSN in bibjson.identifier") # normalise the ids if pissn is not None: @@ -232,7 +234,7 @@ def custom_validate(self): # check they are not the same if pissn is not None and eissn is not 
None: if pissn.id == eissn.id: - raise dataobj.DataStructureException("P-ISSN and E-ISSN should be different") + raise dataobj.DataStructureException("Print ISSN and online ISSN should be different") # check removed: https://github.com/DOAJ/doajPM/issues/2950 diff --git a/portality/api/current/discovery_api_application_swag.json b/portality/api/current/discovery_api_application_swag.json index 1a58bec55c..4936d18646 100644 --- a/portality/api/current/discovery_api_application_swag.json +++ b/portality/api/current/discovery_api_application_swag.json @@ -264,28 +264,28 @@ }, "parameters": [ { - "description": "
Go to 'MY ACCOUNT' and 'Settings' to find your API key. If there is no key, click 'Generate a new API key'. If you do not see that button, contact us.", + "description": "
Go to 'DASHBOARD' and 'Settings' to find your API key. If there is no key, click 'Generate a new API key'. If you do not see that button, contact us.", "required": true, "type": "string", "name": "api_key", "in": "query" }, { - "description": "
What you are searching for, e.g. computers

You can search inside any field you see in the results or the schema. More details
For example, to search for all journals tagged with the keyword \"heritage\"
bibjson.keywords:heritage
Short-hand names are available for some fields
issn:1874-9496
publisher:dove
", + "description": "
What you are searching for, e.g. computers.

You can search in any field in the results or schema. More details
For example, to search for all journals tagged with the keyword \"heritage\"
bibjson.keywords:heritage
Short-hand names are available for some fields
issn:2212-4276
publisher:sciendo
", "required": true, "type": "string", "name": "search_query", "in": "path" }, { - "description": "Which page of the results you wish to see.", + "description": "Which page of the results you wish to see", "required": false, "type": "integer", "name": "page", "in": "query" }, { - "description": "How many results per page you wish to see, the default is 10.", + "description": "How many results per page you wish to see; the default is 10", "required": false, "type": "integer", "name": "pageSize", diff --git a/portality/api/current/discovery_api_article_swag.json b/portality/api/current/discovery_api_article_swag.json index 1c75e89b58..a082ae10d4 100644 --- a/portality/api/current/discovery_api_article_swag.json +++ b/portality/api/current/discovery_api_article_swag.json @@ -187,14 +187,14 @@ "in": "path" }, { - "description": "Which page of the results you wish to see.", + "description": "Which page of the results you want to see.", "required": false, "type": "integer", "name": "page", "in": "query" }, { - "description": "How many results per page you wish to see, the default is 10.", + "description": "How many results per page you want to see. The default is 10.", "required": false, "type": "integer", "name": "pageSize", @@ -211,4 +211,4 @@ "tags": [ "Search" ] -} \ No newline at end of file +} diff --git a/portality/api/current/discovery_api_journal_swag.json b/portality/api/current/discovery_api_journal_swag.json index a01b421294..352ce21666 100644 --- a/portality/api/current/discovery_api_journal_swag.json +++ b/portality/api/current/discovery_api_journal_swag.json @@ -271,14 +271,14 @@ "in": "path" }, { - "description": "Which page of the results you wish to see.", + "description": "Which page of the results you want to see.", "required": false, "type": "integer", "name": "page", "in": "query" }, { - "description": "How many results per page you wish to see, the default is 10.", + "description": "How many results per page you want to see. 
The default is 10.", "required": false, "type": "integer", "name": "pageSize", @@ -295,4 +295,4 @@ "tags": [ "Search" ] -} \ No newline at end of file +} diff --git a/portality/app.py b/portality/app.py index efd9e5b170..2b6e5115a2 100644 --- a/portality/app.py +++ b/portality/app.py @@ -11,6 +11,8 @@ """ import os, sys + +import elasticsearch.exceptions import tzlocal import pytz @@ -37,12 +39,6 @@ from portality.view.doajservices import blueprint as services from portality.view.jct import blueprint as jct from portality.view.apply import blueprint as apply -if 'api1' in app.config['FEATURES']: - from portality.view.api_v1 import blueprint as api_v1 -if 'api2' in app.config['FEATURES']: - from portality.view.api_v2 import blueprint as api_v2 -if 'api3' in app.config['FEATURES']: - from portality.view.api_v3 import blueprint as api_v3 from portality.view.status import blueprint as status from portality.lib.normalise import normalise_doi from portality.view.dashboard import blueprint as dashboard @@ -63,12 +59,22 @@ app.register_blueprint(editor, url_prefix='/editor') # ~~-> Editor:Blueprint~~ app.register_blueprint(services, url_prefix='/service') # ~~-> Services:Blueprint~~ if 'api1' in app.config['FEATURES']: + from portality.view.api_v1 import blueprint as api_v1 app.register_blueprint(api_v1, url_prefix='/api/v1') # ~~-> APIv1:Blueprint~~ if 'api2' in app.config['FEATURES']: + from portality.view.api_v2 import blueprint as api_v2 app.register_blueprint(api_v2, url_prefix='/api/v2') # ~~-> APIv2:Blueprint~~ if 'api3' in app.config['FEATURES']: - app.register_blueprint(api_v3, name='api', url_prefix='/api') # ~~-> APIv3:Blueprint~~ + from portality.view.api_v3 import blueprint as api_v3 app.register_blueprint(api_v3, name='api_v3', url_prefix='/api/v3') # ~~-> APIv3:Blueprint~~ + if app.config.get("CURRENT_API_MAJOR_VERSION") == "3": + app.register_blueprint(api_v3, name='api', url_prefix='/api') +if 'api4' in app.config['FEATURES']: + from portality.view.api_v4 import blueprint as api_v4 + app.register_blueprint(api_v4, name='api_v4', url_prefix='/api/v4') # ~~-> APIv4:Blueprint~~ + if app.config.get("CURRENT_API_MAJOR_VERSION", "4") == "4": + app.register_blueprint(api_v4, name='api', url_prefix='/api') + app.register_blueprint(status, name='status', url_prefix='/status') # ~~-> Status:Blueprint~~ app.register_blueprint(status, name='_status', url_prefix='/_status') app.register_blueprint(apply, url_prefix='/apply') # ~~-> Apply:Blueprint~~ @@ -434,6 +440,12 @@ def page_not_found(e): return render_template('500.html'), 500 +@app.errorhandler(elasticsearch.exceptions.RequestError) +def handle_es_request_error(e): + app.logger.exception(e) + return render_template('400.html'), 400 + + def run_server(host=None, port=None, fake_https=False): """ :param host: diff --git a/portality/bll/doaj.py b/portality/bll/doaj.py index bd756e8b59..e7a55f6bc6 100644 --- a/portality/bll/doaj.py +++ b/portality/bll/doaj.py @@ -118,6 +118,16 @@ def backgroundTaskStatusService(cls): from portality.bll.services import background_task_status return background_task_status.BackgroundTaskStatusService() + @classmethod + def concurrencyPreventionService(cls): + """ + Obtain an instance of the concurrency_prevention service + ~~->Concurrency_Prevention:Service~~ + :return: UpdateRequestConcurrencyPreventionService + """ + from portality.bll.services import concurrency_prevention + return concurrency_prevention.ConcurrencyPreventionService() + @classmethod def tourService(cls): """ @@ -130,4 +140,13 @@ def 
tourService(cls):
 
     @classmethod
     def autochecksService(cls, autocheck_plugins=None):
         from portality.bll.services import autochecks
-        return autochecks.AutocheckService(autocheck_plugins=autocheck_plugins)
\ No newline at end of file
+        return autochecks.AutocheckService(autocheck_plugins=autocheck_plugins)
+
+    @classmethod
+    def hueyJobService(cls):
+        """
+        Obtain an instance of the huey_job service ~~->HueyJob:Service~~
+        :return: HueyJobService
+        """
+        from portality.bll.services import huey_job
+        return huey_job.HueyJobService()
diff --git a/portality/bll/exceptions.py b/portality/bll/exceptions.py
index 005ad7f31c..ca1d0bd8e6 100644
--- a/portality/bll/exceptions.py
+++ b/portality/bll/exceptions.py
@@ -79,6 +79,12 @@ class ArticleMergeConflict(Exception):
     """
     pass
 
+class ConcurrentUpdateRequestException(Exception):
+    """
+    Exception to raise when two update requests are submitted concurrently
+    """
+    pass
+
 class IllegalStatusException(Exception):
     """
     Exception to raise when an application is in a state that is not allowed for the current action
diff --git a/portality/bll/services/application.py b/portality/bll/services/application.py
index ec1be57011..3f46cc14c1 100644
--- a/portality/bll/services/application.py
+++ b/portality/bll/services/application.py
@@ -17,11 +17,29 @@
 from portality.bll.exceptions import AuthoriseException
 from portality.forms.application_forms import ApplicationFormFactory
 
+
 class ApplicationService(object):
     """
     ~~Application:Service->DOAJ:Service~~
     """
 
+    @staticmethod
+    def prevent_concurrent_ur_submission(ur: models.Application, record_if_not_concurrent=True):
+        """
+        Prevent duplicate update request submissions
+        :param ur:
+        :param record_if_not_concurrent:
+        :return:
+        """
+        cs = DOAJ.concurrencyPreventionService()
+
+        if ur.current_journal is not None and ur.id is not None:
+            if cs.check_concurrency(ur.current_journal, ur.id):
+                raise exceptions.ConcurrentUpdateRequestException(Messages.CONCURRENT_UPDATE_REQUEST)
+
+        if record_if_not_concurrent:
+            cs.store_concurrency(ur.current_journal, ur.id, timeout=app.config.get("UR_CONCURRENCY_TIMEOUT", 10))
+
     def reject_application(self, application, account, provenance=True, note=None, manual_update=True):
         """
         Reject an application. 
This will: @@ -34,6 +52,7 @@ def reject_application(self, application, account, provenance=True, note=None, m :param application: :param account: :param provenance: + :param note: :param manual_update: :return: """ @@ -540,7 +559,7 @@ def delete_application(self, application_id, account): if application.related_journal is not None: try: related_journal, rjlock = journalService.journal(application.related_journal, lock_journal=True, lock_account=account) - except lock.Locked as e: + except lock.Locked: # if the resource is locked, we have to back out if alock is not None: alock.delete() if cjlock is not None: cjlock.delete() @@ -649,14 +668,11 @@ def validate_update_csv(self, file_path, account: models.Account): was=journal_value, now=e.value) continue - - if len(updates) == 0: validation.row(validation.WARN, row_ix, Messages.JOURNAL_CSV_VALIDATE__NO_DATA_CHANGE) continue # if we get to here, then there are updates - [validation.log(upd) for upd in updates] # If a field is disabled in the UR Form Context, then we must confirm that the form data from the @@ -684,7 +700,7 @@ def validate_update_csv(self, file_path, account: models.Account): alock = None try: # ~~ ^->UpdateRequest:Feature ~~ - update_req, jlock, alock = self.update_request_for_journal(j.id, account=j.owner_account, lock_records=False) + update_req, jlock, alock = self.update_request_for_journal(j.id, account=account, lock_records=False) except AuthoriseException as e: validation.row(validation.ERROR, row_ix, Messages.JOURNAL_CSV_VALIDATE__CANNOT_MAKE_UR.format(reason=e.reason)) continue @@ -706,7 +722,7 @@ def validate_update_csv(self, file_path, account: models.Account): question = Journal2PublisherUploadQuestionsXwalk.q(k) try: pos = header_row.index(question) - except: + except ValueError: # this is because the validation is on a field which is not in the csv, so it must # be due to an existing validation error in the data, and not something the publisher # can do anything about @@ -717,6 +733,10 @@ def validate_update_csv(self, file_path, account: models.Account): for sk, sv in v[0].items(): validation.value(validation.ERROR, row_ix, pos, ". ".join(sv), was=was, now=now) + elif isinstance(v[0], list): + # If we have a list, we must go a level deeper + validation.value(validation.ERROR, row_ix, pos, ". ".join(v[0]), + was=was, now=now) else: validation.value(validation.ERROR, row_ix, pos, ". 
".join(v), was=was, now=now) @@ -806,7 +826,7 @@ def _cleanhtml(self, raw_html): return cleantext def json(self, indent=None): - repr = { + _repr = { "has_errors": self._errors, "has_warnings": self._warnings, "general": self._general, @@ -815,4 +835,4 @@ def json(self, indent=None): "values": self._values, "log": self._log } - return json.dumps(repr, indent=indent) + return json.dumps(_repr, indent=indent) diff --git a/portality/bll/services/background_task_status.py b/portality/bll/services/background_task_status.py index ae0c6b7908..3ebe187ff7 100644 --- a/portality/bll/services/background_task_status.py +++ b/portality/bll/services/background_task_status.py @@ -122,9 +122,8 @@ def get_config_dict_by_queue_name(config_name, queue_name): if qn == queue_name} return { - k: v - for k, v in app.config.get(config_name, {}).items() - if k in actions + k: app.config.get(config_name, {}).get(k, app.config.get('BG_MONITOR_DEFAULT_CONFIG')) + for k in actions } def create_background_status(self) -> dict: @@ -137,4 +136,30 @@ def create_background_status(self) -> dict: status=(self.all_stable_str(queues.values())), queues=queues, ) - return result_dict + + # sort the results in the order of unstable status + sorted_data = self.sort_dict_by_unstable_status(result_dict) + + return sorted_data + + def sort_dict_by_unstable_status(self, data): + """ + Sorts each dictionary within the nested structure by prioritizing items with 'status': 'unstable'. + The overall structure of the input dictionary is preserved. + """ + if isinstance(data, dict): + # Extract items with 'status': 'unstable' and other items + unstable_items = {k: v for k, v in data.items() if isinstance(v, dict) and v.get('status') == 'unstable'} + other_items = {k: v for k, v in data.items() if k not in unstable_items} + + # Recursively sort nested dictionaries + for k in unstable_items: + unstable_items[k] = self.sort_dict_by_unstable_status(unstable_items[k]) + for k in other_items: + other_items[k] = self.sort_dict_by_unstable_status(other_items[k]) + + # Merge the dictionaries, with unstable items first + return {**unstable_items, **other_items} + else: + # Return the item as is if it's not a dict + return data diff --git a/portality/bll/services/concurrency_prevention.py b/portality/bll/services/concurrency_prevention.py new file mode 100644 index 0000000000..4fe329353e --- /dev/null +++ b/portality/bll/services/concurrency_prevention.py @@ -0,0 +1,21 @@ +from portality.core import app +import redis + + +class ConcurrencyPreventionService: + def __init__(self): + self.rs = redis.Redis(host=app.config.get("REDIS_HOST"), port=app.config.get("REDIS_PORT")) + + def check_concurrency(self, key, _id): + """ + Checks whether concurrent request has been submitted + Returns true if clash is detected + """ + value = self.rs.get(key) + return value is not None and value != _id + + def store_concurrency(self, key, _id, timeout=None): + if timeout is None: + timeout = app.config.get("UR_CONCURRENCY_TIMEOUT", 10) + if timeout > 0: + self.rs.set(key, _id, ex=timeout) diff --git a/portality/bll/services/events.py b/portality/bll/services/events.py index 70041fc9bf..aa7a937f1c 100644 --- a/portality/bll/services/events.py +++ b/portality/bll/services/events.py @@ -1,4 +1,5 @@ from portality.core import app +from portality.events.consumers.update_request_publisher_submitted_notify import UpdateRequestPublisherSubmittedNotify from portality.lib import plugin from portality.events.consumers.account_created_email import AccountCreatedEmail @@ -49,7 +50,8 
diff --git a/portality/bll/services/events.py b/portality/bll/services/events.py
index 70041fc9bf..aa7a937f1c 100644
--- a/portality/bll/services/events.py
+++ b/portality/bll/services/events.py
@@ -1,4 +1,5 @@
 from portality.core import app
+from portality.events.consumers.update_request_publisher_submitted_notify import UpdateRequestPublisherSubmittedNotify
 from portality.lib import plugin
 
 from portality.events.consumers.account_created_email import AccountCreatedEmail
@@ -49,7 +50,8 @@ class EventsService(object):
         UpdateRequestPublisherAcceptedNotify,
         UpdateRequestPublisherAssignedNotify,
         UpdateRequestPublisherRejectedNotify,
-        JournalDiscontinuingSoonNotify
+        UpdateRequestPublisherSubmittedNotify,
+        JournalDiscontinuingSoonNotify,
     ]
 
     def __init__(self):
@@ -61,7 +63,7 @@ def trigger(self, event):
     def consume(self, event):
         for consumer in self.EVENT_CONSUMERS:
             try:
-                if consumer.consumes(event):
+                if consumer.should_consume(event):
                     consumer.consume(event)
             except Exception as e:
                 app.logger.error("Error in consumer {x}: {e}".format(e=str(e), x=consumer.ID))
diff --git a/portality/bll/services/huey_job.py b/portality/bll/services/huey_job.py
new file mode 100644
index 0000000000..d0f7f88587
--- /dev/null
+++ b/portality/bll/services/huey_job.py
@@ -0,0 +1,68 @@
+"""
+Helper functions for huey background jobs
+"""
+import itertools
+import pickle
+import re
+from typing import Iterator
+
+import redis
+
+from portality.core import app
+
+
+class HueyJobData:
+
+    def __init__(self, data: tuple):
+        self.data = data
+        self.huey_id, self.queue_name, self.schedule_time, self.retries, self.retry_delay, self.args, *_ = data
+
+    @property
+    def is_scheduled(self):
+        # a job with a schedule_time set is a scheduled (delayed) job; queued jobs have none
+        return self.schedule_time is not None
+
+    @property
+    def bgjob_action(self):
+        return re.sub(r'^queue_task_(scheduled_)?', '', self.queue_name)
+
+    @property
+    def bgjob_id(self):
+        if self.args:
+            return self.args[0][0]
+        return None
+
+    @classmethod
+    def from_redis(cls, redis_row):
+        return HueyJobData(pickle.loads(redis_row))
+
+    def as_redis(self):
+        return pickle.dumps(self.data)
+
+
+HUEY_REDIS_DOAJMAINQUEUE = 'huey.redis.doajmainqueue'
+HUEY_REDIS_DOAJLONGRUNNING = 'huey.redis.doajlongrunning'
+HUEY_REDIS_KEYS = [HUEY_REDIS_DOAJMAINQUEUE, HUEY_REDIS_DOAJLONGRUNNING]
+
+
+class HueyJobService:
+
+    def create_redis_client(self):
+        client = redis.StrictRedis(host=app.config['REDIS_HOST'], port=app.config['REDIS_PORT'], db=0)
+        return client
+
+    def find_all_huey_jobs(self, client=None) -> Iterator[HueyJobData]:
+        client = client or self.create_redis_client()
+        huey_rows = itertools.chain.from_iterable((client.lrange(k, 0, -1)
+                                                   for k in HUEY_REDIS_KEYS))
+        huey_rows = (HueyJobData.from_redis(r) for r in huey_rows)
+        return huey_rows
+
+    def find_queued_huey_jobs(self, client=None) -> Iterator[HueyJobData]:
+        client = client or self.create_redis_client()
+        return (r for r in self.find_all_huey_jobs(client=client) if not r.is_scheduled)
+
+    def rm_huey_job_from_redis(self, huey_job_data: 'HueyJobData', client=None):
+        client = client or self.create_redis_client()
+        for key in HUEY_REDIS_KEYS:
+            if client.lrem(key, 1, huey_job_data.as_redis()):
+                break
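+
+
+# A usage sketch (illustrative only):
+#
+#   svc = HueyJobService()
+#   for job in svc.find_queued_huey_jobs():
+#       print(job.bgjob_action, job.bgjob_id)
+#
+# The manage-bgjobs commands described in docs/dev/user-guide/user-guide.md
+# operate on these same redis queues.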
diff --git a/portality/bll/services/query.py b/portality/bll/services/query.py
index 9595dfc801..e753f12575 100644
--- a/portality/bll/services/query.py
+++ b/portality/bll/services/query.py
@@ -272,12 +272,17 @@ def size(self):
         try:
             return int(self.q["size"])
         except ValueError:
-            return 10
+            app.logger.warning("Invalid size parameter in query: [{x}], "
+                               "expected integer value".format(x=self.q["size"]))
+            return 10
 
     def from_result(self):
         if "from" in self.q:
-            return int(self.q["from"])
+            try:
+                return int(self.q["from"])
+            except ValueError:
+                app.logger.warning("Invalid from parameter in query: [{x}], "
+                                   "expected integer value".format(x=self.q["from"]))
         return 0
 
     def as_dict(self):
diff --git a/portality/constants.py b/portality/constants.py
index 7d0eda81e6..2fec308772 100644
--- a/portality/constants.py
+++ b/portality/constants.py
@@ -62,12 +62,13 @@
 
 # Roles
 ROLE_ASSOCIATE_EDITOR = 'associate_editor'
 
 EVENT_ACCOUNT_CREATED = "account:created"
 EVENT_ACCOUNT_PASSWORD_RESET = "account:password_reset"
 EVENT_APPLICATION_STATUS = "application:status"
 EVENT_APPLICATION_ASSED_ASSIGNED = "application:assed:assigned"
 EVENT_APPLICATION_CREATED = "application:created"
+EVENT_APPLICATION_UR_SUBMITTED = "application:ur_submitted"
 EVENT_APPLICATION_EDITOR_GROUP_ASSIGNED = "application:editor_group:assigned"
 EVENT_JOURNAL_ASSED_ASSIGNED = "journal:assed:assigned"
 EVENT_JOURNAL_EDITOR_GROUP_ASSIGNED = "journal:editor_group:assigned"
@@ -91,12 +93,14 @@
 ROLE_PUBLISHER = "publisher"
 ROLE_PUBLISHER_JOURNAL_CSV = "journal_csv"
 ROLE_PUBLISHER_PRESERVATION = "preservation"
+ROLE_API = "api"
 
 CRON_NEVER = {"month": "2", "day": "31", "day_of_week": "*", "hour": "*", "minute": "*"}
 
 # ~~-> BackgroundTask:Monitoring~~
 # BackgroundJob.status
 BGJOB_STATUS_QUEUED = 'queued'
+BGJOB_STATUS_PROCESSING = 'processing'
 BGJOB_STATUS_ERROR = 'error'
 BGJOB_STATUS_COMPLETE = 'complete'
 
@@ -132,6 +136,10 @@
 class BgjobOutcomeStatus(ConstantList):
     Fail = 'fail'
 
+class BaseArticlesUploadStatus(ConstantList):
+    Processed = 'processed'
+
 
 # Storage scopes
 STORE__SCOPE__PUBLIC_DATA_DUMP = "public_data_dump"
diff --git a/portality/core.py b/portality/core.py
index 2eb486ea8e..8280edc620 100644
--- a/portality/core.py
+++ b/portality/core.py
@@ -166,18 +166,10 @@ def load_crossref_schema(app):
 
 def create_es_connection(app):
     # ~~ElasticConnection:Framework->Elasticsearch:Technology~~
-    # temporary logging config for debugging index-per-type
-    #import logging
-    #esprit.raw.configure_logging(logging.DEBUG)
-
-    # FIXME: we are removing esprit conn in favour of elasticsearch lib
-    # make a connection to the index
-    # if app.config['ELASTIC_SEARCH_INDEX_PER_TYPE']:
-    #     conn = esprit.raw.Connection(host=app.config['ELASTIC_SEARCH_HOST'], index='')
-    # else:
-    #     conn = esprit.raw.Connection(app.config['ELASTIC_SEARCH_HOST'], app.config['ELASTIC_SEARCH_DB'])
-
-    conn = elasticsearch.Elasticsearch(app.config['ELASTICSEARCH_HOSTS'], verify_certs=app.config.get("ELASTIC_SEARCH_VERIFY_CERTS", True))
+
+    conn = elasticsearch.Elasticsearch(app.config['ELASTICSEARCH_HOSTS'],
+                                       verify_certs=app.config.get("ELASTIC_SEARCH_VERIFY_CERTS", True),
+                                       request_timeout=app.config.get('ELASTICSEARCH_REQ_TIMEOUT', 15))
     return conn
 
diff --git a/portality/dao.py b/portality/dao.py
index 1c2b32da5f..80635d714a 100644
--- a/portality/dao.py
+++ b/portality/dao.py
@@ -446,9 +446,13 @@ def send_query(cls, qobj, retry=50, **kwargs):
                                     headers=CONTENT_TYPE_JSON, **kwargs)
                 break
             except Exception as e:
-                exception = ESMappingMissingError(e) if ES_MAPPING_MISSING_REGEX.match(json.dumps(e.args[2])) else e
-                if isinstance(exception, ESMappingMissingError):
-                    raise exception
+                try:
+                    exception = ESMappingMissingError(e) if ES_MAPPING_MISSING_REGEX.match(json.dumps(e.args[2])) else e
+                    if isinstance(exception, ESMappingMissingError):
+                        raise exception
+                except TypeError:
+                    raise e
+
                 time.sleep(0.5)
 
         if r is not None:
diff --git a/portality/decorators.py b/portality/decorators.py
index c3ab718535..9779a7d806 100644
--- a/portality/decorators.py
+++ b/portality/decorators.py
@@ -1,10 +1,9 @@
 import json, signal
+import re
 from functools import wraps
 
 from flask import request, abort, redirect, flash, url_for, render_template, make_response
 from flask_login import login_user, current_user
 
-from portality.api.common import Api401Error
-
 from portality.core import app
 from portality.lib import dates 
from portality.models import Account
 
@@ -17,7 +16,7 @@ def swag(swag_summary, swag_spec):
     Decorator for API functions, adding swagger info to the swagger spec.
     """
     def decorator(f):
-        f.summary = swag_summary
+        f.summary = re.sub(r'<[^>]+>', '', swag_summary)  # strip html markup from the summary
         f.swag = swag_spec
         f.description = swag_summary
         return f
@@ -39,6 +38,7 @@ def decorated_view(*args, **kwargs):
             if login_user(user, remember=False):
                 return fn(*args, **kwargs)
         # else
+        from portality.api.common import Api401Error
         raise Api401Error("An API Key is required to access this.")
     return decorated_view
 
diff --git a/portality/events/combined.py b/portality/events/combined.py
deleted file mode 100644
index 869d63ab88..0000000000
--- a/portality/events/combined.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from portality.events.shortcircuit import send_event as shortcircuit_send_event
-from portality.core import app
-
-
-def send_event(event):
-    try:
-        from portality.events.kafka_producer import send_event as kafka_send_event
-        kafka_send_event(event)
-    except Exception as e:
-        app.logger.exception("Failed to send event to Kafka. " + str(e))
-        shortcircuit_send_event(event)
diff --git a/portality/events/consumer.py b/portality/events/consumer.py
index 268bbfbab3..3018e1d179 100644
--- a/portality/events/consumer.py
+++ b/portality/events/consumer.py
@@ -3,9 +3,15 @@ class EventConsumer(object):
     ID = None
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event) -> bool:
+        """
+        Determine whether this consumer should consume the given event
+        """
        raise NotImplementedError()
 
     @classmethod
-    def consume(cls, event):
+    def consume(cls, event) -> None:
+        """
+        Run the operation to consume and handle the event
+        """
        raise NotImplementedError()
\ No newline at end of file
diff --git a/portality/events/consumer_utils.py b/portality/events/consumer_utils.py
new file mode 100644
index 0000000000..7fbbb0a24c
--- /dev/null
+++ b/portality/events/consumer_utils.py
@@ -0,0 +1,11 @@
+from portality import models
+from portality.bll import exceptions
+
+
+def parse_application(application_raw: dict) -> models.Application:
+    try:
+        return models.Application(**application_raw)
+    except Exception as e:
+        raise exceptions.NoSuchObjectException(
+            "Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e)
+        )
diff --git a/portality/events/consumers/account_created_email.py b/portality/events/consumers/account_created_email.py
index 5a102fe7b0..6f1f2ca171 100644
--- a/portality/events/consumers/account_created_email.py
+++ b/portality/events/consumers/account_created_email.py
@@ -12,7 +12,7 @@ class AccountCreatedEmail(EventConsumer):
     ID = "account:created:email"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_ACCOUNT_CREATED and event.context.get("account") is not None
 
     @classmethod
diff --git a/portality/events/consumers/account_passwordreset_email.py b/portality/events/consumers/account_passwordreset_email.py
index 130a837b29..92c00824e8 100644
--- a/portality/events/consumers/account_passwordreset_email.py
+++ b/portality/events/consumers/account_passwordreset_email.py
@@ -12,7 +12,7 @@ class AccountPasswordResetEmail(EventConsumer):
     ID = "account:password_reset:email"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_ACCOUNT_PASSWORD_RESET and event.context.get("account") is not None
 
     @classmethod
diff --git a/portality/events/consumers/application_assed_assigned_notify.py 
diff --git a/portality/events/consumers/application_assed_assigned_notify.py b/portality/events/consumers/application_assed_assigned_notify.py
index 954a359832..a403f5ab34 100644
--- a/portality/events/consumers/application_assed_assigned_notify.py
+++ b/portality/events/consumers/application_assed_assigned_notify.py
@@ -1,4 +1,5 @@
 # ~~ ApplicationAssedAssignedNotify:Consumer ~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -11,7 +12,7 @@ class ApplicationAssedAssignedNotify(EventConsumer):
     ID = "application:assed:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_ASSED_ASSIGNED and \
                event.context.get("application") is not None
 
@@ -19,11 +20,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.editor:
             raise exceptions.NoSuchPropertyException("Application {x} does not have property `editor`".format(x=application.id))
 
@@ -39,7 +36,7 @@ def consume(cls, event):
             group_name=application.editor_group
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.application", application_id=application.id)
diff --git a/portality/events/consumers/application_assed_inprogress_notify.py b/portality/events/consumers/application_assed_inprogress_notify.py
index 652dc60f77..68520dd6d2 100644
--- a/portality/events/consumers/application_assed_inprogress_notify.py
+++ b/portality/events/consumers/application_assed_inprogress_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationAssedInProgressNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -10,7 +11,7 @@ class ApplicationAssedInprogressNotify(EventConsumer):
     ID = "application:assed:inprogress:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("application") is not None and \
                event.context.get("old_status") in [constants.APPLICATION_STATUS_COMPLETED, constants.APPLICATION_STATUS_READY] and \
@@ -20,11 +21,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.editor:
             return
 
@@ -37,7 +34,7 @@ def consume(cls, event):
         notification.classification = constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE
         notification.long = svc.long_notification(cls.ID).format(application_title=application.bibjson().title)
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.application", application_id=application.id)
diff --git a/portality/events/consumers/application_editor_completed_notify.py b/portality/events/consumers/application_editor_completed_notify.py
index cd90c8bb51..4c5710e418 100644
--- a/portality/events/consumers/application_editor_completed_notify.py
+++ b/portality/events/consumers/application_editor_completed_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationEditorCompletedNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -11,7 +12,7 @@ class ApplicationEditorCompletedNotify(EventConsumer):
     ID = "application:editor:completed:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("old_status") != constants.APPLICATION_STATUS_COMPLETED and \
                event.context.get("new_status") == constants.APPLICATION_STATUS_COMPLETED
@@ -59,7 +60,7 @@ def consume(cls, event):
             associate_editor=associate_editor
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.application", application_id=application.id)
diff --git a/portality/events/consumers/application_editor_group_assigned_notify.py b/portality/events/consumers/application_editor_group_assigned_notify.py
index 22b277d283..54bfdfe1e6 100644
--- a/portality/events/consumers/application_editor_group_assigned_notify.py
+++ b/portality/events/consumers/application_editor_group_assigned_notify.py
@@ -1,4 +1,5 @@
 # ~~ ApplicatioditorGroupAssignedNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -11,7 +12,7 @@ class ApplicationEditorGroupAssignedNotify(EventConsumer):
     ID = "application:editor_group:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_EDITOR_GROUP_ASSIGNED and \
                event.context.get("application") is not None
 
@@ -19,11 +20,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.editor_group:
             return
 
@@ -44,7 +41,7 @@ def consume(cls, event):
             journal_name=application.bibjson().title
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.application", application_id=application.id)
diff --git a/portality/events/consumers/application_editor_inprogress_notify.py b/portality/events/consumers/application_editor_inprogress_notify.py
index 0c929e3631..38d698683c 100644
--- a/portality/events/consumers/application_editor_inprogress_notify.py
+++ b/portality/events/consumers/application_editor_inprogress_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationEditorInProgressNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 
@@ -12,7 +13,7 @@ class ApplicationEditorInProgressNotify(EventConsumer):
     ID = "application:editor:inprogress:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("old_status") in [constants.APPLICATION_STATUS_READY, constants.APPLICATION_STATUS_COMPLETED] and \
                event.context.get("new_status") == constants.APPLICATION_STATUS_IN_PROGRESS
@@ -54,7 +55,7 @@ def consume(cls, event):
             application_title=application.bibjson().title
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.application", application_id=application.id)
diff --git a/portality/events/consumers/application_maned_ready_notify.py b/portality/events/consumers/application_maned_ready_notify.py
index 58b22384b8..ac4b70d9c2 100644
--- a/portality/events/consumers/application_maned_ready_notify.py
+++ b/portality/events/consumers/application_maned_ready_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationManedReadyNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -10,7 +11,7 @@ class ApplicationManedReadyNotify(EventConsumer):
     ID = "application:maned:ready:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("application") is not None and \
                event.context.get("old_status") != constants.APPLICATION_STATUS_READY and \
@@ -20,11 +21,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.editor_group:
             return
 
@@ -50,7 +47,7 @@ def consume(cls, event):
             editor=editor
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("admin.application", application_id=application.id)
diff --git a/portality/events/consumers/application_publisher_accepted_notify.py b/portality/events/consumers/application_publisher_accepted_notify.py
index 9bd34769c2..350d491ee3 100644
--- a/portality/events/consumers/application_publisher_accepted_notify.py
+++ b/portality/events/consumers/application_publisher_accepted_notify.py
@@ -1,24 +1,24 @@
 # ~~ApplicationPublisherAcceptedNotify:Consumer~~
-from portality.util import url_for
-from portality.events.consumer import EventConsumer
 from portality import constants
 from portality import models
-from portality.lib import edges, dates
-from portality.bll import DOAJ, exceptions
+from portality.bll import DOAJ
 from portality.core import app
+from portality.events import consumer_utils
+from portality.events.consumer import EventConsumer
+from portality.lib import dates
+from portality.models import Account
+from portality.util import url_for
 
 
 class ApplicationPublisherAcceptedNotify(EventConsumer):
     ID = "application:publisher:accepted:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         if event.id != constants.EVENT_APPLICATION_STATUS:
             return False
 
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return False
 
         app_source = event.context.get("application")
@@ -28,28 +28,18 @@ def consumes(cls, event):
         if event.context.get("new_status") != constants.APPLICATION_STATUS_ACCEPTED:
             return False
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         is_new_application = application.application_type == constants.APPLICATION_TYPE_NEW_APPLICATION
         return is_new_application
 
     @classmethod
     def consume(cls, event):
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return
 
         app_source = event.context.get("application")
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             return
 
@@ -68,7 +58,7 @@ def consume(cls, event):
             faq_url=app.config.get("BASE_URL") + url_for("doaj.faq")
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("publisher.journals")
diff --git a/portality/events/consumers/application_publisher_assigned_notify.py b/portality/events/consumers/application_publisher_assigned_notify.py
index 2349dca60c..470bbcdc7a 100644
--- a/portality/events/consumers/application_publisher_assigned_notify.py
+++ b/portality/events/consumers/application_publisher_assigned_notify.py
@@ -1,4 +1,6 @@
 # ~~ApplicationPublisherAssignedNotify:Consumer~~
+from portality.events import consumer_utils
+from portality.models import Account
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 from portality import constants
@@ -13,13 +15,11 @@ class ApplicationPublisherAssignedNotify(EventConsumer):
     ID = "application:publisher:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         if event.id != constants.EVENT_APPLICATION_ASSED_ASSIGNED:
             return False
 
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return False
 
         app_source = event.context.get("application")
@@ -32,11 +32,7 @@ def consumes(cls, event):
         if event.context.get("new_editor") in [None, ""]:
             return False
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         is_new_application = application.application_type == constants.APPLICATION_TYPE_NEW_APPLICATION
         return is_new_application
 
@@ -44,11 +40,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             raise exceptions.NoSuchPropertyException("Application {x} does not have property `owner`".format(x=application.id))
 
@@ -65,7 +57,7 @@ def consume(cls, event):
             volunteers_url=app.config.get('BASE_URL', "https://doaj.org") + url_for("doaj.volunteers"),
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         # note that there is no action url
diff --git a/portality/events/consumers/application_publisher_created_notify.py b/portality/events/consumers/application_publisher_created_notify.py
index 14640a18e0..497ddf8d40 100644
--- a/portality/events/consumers/application_publisher_created_notify.py
+++ b/portality/events/consumers/application_publisher_created_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationPublisherCreatedNotify:Notifications~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.lib import dates
 from portality.events.consumer import EventConsumer
@@ -12,7 +13,7 @@ class ApplicationPublisherCreatedNotify(EventConsumer):
     ID = "application:publisher:created:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_CREATED and event.context.get("application") is not None
 
     @classmethod
@@ -42,7 +43,7 @@ def consume(cls, event):
             application_date=dates.human_date(application.date_applied),
             volunteers_url=url_for("doaj.volunteers"))
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         svc.notify(notification)
diff --git a/portality/events/consumers/application_publisher_inprogress_notify.py b/portality/events/consumers/application_publisher_inprogress_notify.py
index 659d5d31c2..04bc7b26b9 100644
--- a/portality/events/consumers/application_publisher_inprogress_notify.py
+++ b/portality/events/consumers/application_publisher_inprogress_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationPublisherInProgressNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.core import app
 from portality.events.consumer import EventConsumer
@@ -12,7 +13,7 @@ class ApplicationPublisherInprogressNotify(EventConsumer):
     ID = "application:publisher:inprogress:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("application") is not None and \
                event.context.get("old_status") == constants.APPLICATION_STATUS_PENDING and \
@@ -47,7 +48,7 @@ def consume(cls, event):
             volunteers=volunteers
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
        )
 
         svc.notify(notification)
diff --git a/portality/events/consumers/application_publisher_quickreject_notify.py b/portality/events/consumers/application_publisher_quickreject_notify.py
index 7608d18cbf..c35721c205 100644
--- a/portality/events/consumers/application_publisher_quickreject_notify.py
+++ b/portality/events/consumers/application_publisher_quickreject_notify.py
@@ -1,4 +1,5 @@
 # ~~ApplicationPublisherQuickRejectNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.lib import dates
 from portality.lib.dates import FMT_DATE_HUMAN_A
 from portality.util import url_for
@@ -14,7 +15,7 @@ class ApplicationPublisherQuickRejectNotify(EventConsumer):
     ID = "application:publisher:quickreject:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("application") is not None and \
                event.context.get("old_status") != constants.APPLICATION_STATUS_REJECTED and \
@@ -28,11 +29,7 @@ def consume(cls, event):
         if note:
             note = "\n\n**Reason for rejection**\n\n" + note + "\n\n"
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             return
 
@@ -52,7 +49,7 @@ def consume(cls, event):
             doaj_guide_url=app.config.get('BASE_URL', "https://doaj.org") + url_for("doaj.guide")
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         # there is no action url for this notification
diff --git a/portality/events/consumers/application_publisher_revision_notify.py b/portality/events/consumers/application_publisher_revision_notify.py
index 143e0c2ccb..b2f4c99a23 100644
--- a/portality/events/consumers/application_publisher_revision_notify.py
+++ b/portality/events/consumers/application_publisher_revision_notify.py
@@ -1,5 +1,5 @@
 # ~~ApplicationPublisherRevisionNotify:Consumer~~
-
+from portality.events import consumer_utils
 from portality.events.consumer import EventConsumer
 from portality import constants
 from portality import models
@@ -13,7 +13,7 @@ class ApplicationPublisherRevisionNotify(EventConsumer):
     ID = "application:publisher:revision:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_APPLICATION_STATUS and \
                event.context.get("application") is not None and \
                event.context.get("old_status") != constants.APPLICATION_STATUS_REVISIONS_REQUIRED and \
@@ -44,7 +44,7 @@ def consume(cls, event):
             date_applied=date_applied
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         svc.notify(notification)
diff --git a/portality/events/consumers/bg_job_finished_notify.py b/portality/events/consumers/bg_job_finished_notify.py
index 9bee7d1722..32535eaf07 100644
--- a/portality/events/consumers/bg_job_finished_notify.py
+++ b/portality/events/consumers/bg_job_finished_notify.py
@@ -13,7 +13,7 @@ class BGJobFinishedNotify(EventConsumer):
     ID = "bg:job_finished:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.BACKGROUND_JOB_FINISHED and \
                event.context.get("job") is not None
 
diff --git a/portality/events/consumers/journal_assed_assigned_notify.py b/portality/events/consumers/journal_assed_assigned_notify.py
index c57d3d07d0..5788334e44 100644
--- a/portality/events/consumers/journal_assed_assigned_notify.py
+++ b/portality/events/consumers/journal_assed_assigned_notify.py
@@ -1,4 +1,5 @@
 # ~~JournalAssedAssignedNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 
@@ -12,7 +13,7 @@ class JournalAssedAssignedNotify(EventConsumer):
     ID = "journal:assed:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_JOURNAL_ASSED_ASSIGNED and \
                event.context.get("journal") is not None
 
@@ -40,7 +41,7 @@ def consume(cls, event):
             group_name=journal.editor_group
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in journal.bibjson().issns())
+            issns=journal.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.journal_page", journal_id=journal.id)
diff --git a/portality/events/consumers/journal_discontinuing_soon_notify.py b/portality/events/consumers/journal_discontinuing_soon_notify.py
index 11da31bb96..fc603fcd76 100644
--- a/portality/events/consumers/journal_discontinuing_soon_notify.py
+++ b/portality/events/consumers/journal_discontinuing_soon_notify.py
@@ -12,10 +12,10 @@ from portality import dao
 
 class JournalDiscontinuingSoonNotify(EventConsumer):
-    ID = "journal:assed:discontinuing_soon:notify"
+    ID = "journal:maned:discontinuing_soon:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_JOURNAL_DISCONTINUING_SOON and \
                event.context.get("journal") is not None and \
                event.context.get("discontinue_date") is not None
diff --git a/portality/events/consumers/journal_editor_group_assigned_notify.py b/portality/events/consumers/journal_editor_group_assigned_notify.py
index 217a404b80..6f1637d019 100644
--- a/portality/events/consumers/journal_editor_group_assigned_notify.py
+++ b/portality/events/consumers/journal_editor_group_assigned_notify.py
@@ -1,4 +1,5 @@
 # ~~JournalEditorGroupAssignedNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 
@@ -12,7 +13,7 @@ class JournalEditorGroupAssignedNotify(EventConsumer):
     ID = "journal:editor_group:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         return event.id == constants.EVENT_JOURNAL_EDITOR_GROUP_ASSIGNED and \
                event.context.get("journal") is not None
 
@@ -45,7 +46,7 @@ def consume(cls, event):
             journal_name=journal.bibjson().title
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in journal.bibjson().issns())
+            issns=journal.bibjson().issns_as_text()
         )
         notification.action = url_for("editor.journal_page", journal_id=journal.id)
diff --git a/portality/events/consumers/update_request_publisher_accepted_notify.py b/portality/events/consumers/update_request_publisher_accepted_notify.py
index 3484617232..9e3795022e 100644
--- a/portality/events/consumers/update_request_publisher_accepted_notify.py
+++ b/portality/events/consumers/update_request_publisher_accepted_notify.py
@@ -1,4 +1,6 @@
 # ~~UpdateRequestPublisherAcceptedNotify:Consumer~~
+from portality.events import consumer_utils
+from portality.models import Account
 from portality.util import url_for
 from portality.events.consumer import EventConsumer
 
@@ -13,13 +15,11 @@ class UpdateRequestPublisherAcceptedNotify(EventConsumer):
     ID = "update_request:publisher:accepted:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         if event.id != constants.EVENT_APPLICATION_STATUS:
             return False
 
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return False
 
         app_source = event.context.get("application")
@@ -29,28 +29,18 @@ def consumes(cls, event):
         if event.context.get("new_status") != constants.APPLICATION_STATUS_ACCEPTED:
             return False
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         is_update_request = application.application_type == constants.APPLICATION_TYPE_UPDATE_REQUEST
         return is_update_request
 
     @classmethod
     def consume(cls, event):
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return
 
         app_source = event.context.get("application")
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             return
 
@@ -68,7 +58,7 @@ def consume(cls, event):
             publisher_dashboard_url=app.config.get('BASE_URL', "https://doaj.org") + url_for("publisher.journals")
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
         notification.action = url_for("publisher.journals")
diff --git a/portality/events/consumers/update_request_publisher_assigned_notify.py b/portality/events/consumers/update_request_publisher_assigned_notify.py
index 63254fbbc2..8de13cb9f4 100644
--- a/portality/events/consumers/update_request_publisher_assigned_notify.py
+++ b/portality/events/consumers/update_request_publisher_assigned_notify.py
@@ -1,4 +1,5 @@
 # ~~UpdateRequestPublisherAssignedNotify:Consumer~~
+from portality.events import consumer_utils
 from portality.events.consumer import EventConsumer
 from portality import constants
 from portality import models
@@ -6,19 +7,18 @@ from portality.bll import exceptions
 from portality.lib import dates
 from portality.core import app
+from portality.models import Account
 
 
 class UpdateRequestPublisherAssignedNotify(EventConsumer):
     ID = "update_request:publisher:assigned:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         if event.id != constants.EVENT_APPLICATION_ASSED_ASSIGNED:
             return False
 
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return False
 
         app_source = event.context.get("application")
@@ -31,11 +31,7 @@ def consumes(cls, event):
         if event.context.get("new_editor") in [None, ""]:
             return False
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         is_update_request = application.application_type == constants.APPLICATION_TYPE_UPDATE_REQUEST
         return is_update_request
 
@@ -43,11 +39,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             raise exceptions.NoSuchPropertyException("Application {x} does not have property `owner`".format(x=application.id))
 
@@ -63,7 +55,7 @@ def consume(cls, event):
             application_date=dates.human_date(application.date_applied)
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         # note that there is no action url
diff --git a/portality/events/consumers/update_request_publisher_rejected_notify.py b/portality/events/consumers/update_request_publisher_rejected_notify.py
index 8885891a96..20c1b3cb32 100644
--- a/portality/events/consumers/update_request_publisher_rejected_notify.py
+++ b/portality/events/consumers/update_request_publisher_rejected_notify.py
@@ -1,25 +1,23 @@
 # ~~UpdateRequestPublisherRejectedNotify:Consumer~~
-
-from portality.events.consumer import EventConsumer
 from portality import constants
 from portality import models
-from portality.bll import DOAJ, exceptions
-from portality.core import app
+from portality.bll import DOAJ
+from portality.events import consumer_utils
+from portality.events.consumer import EventConsumer
 from portality.lib import dates
 from portality.lib.dates import FMT_DATE_HUMAN_A
+from portality.models import Account
 
 
 class UpdateRequestPublisherRejectedNotify(EventConsumer):
     ID = "update_request:publisher:rejected:notify"
 
     @classmethod
-    def consumes(cls, event):
+    def should_consume(cls, event):
         if event.id != constants.EVENT_APPLICATION_STATUS:
             return False
 
-        # TODO: in the long run this needs to move out to the user's email preferences but for now it
-        # is here to replicate the behaviour in the code it replaces
-        if not app.config.get("ENABLE_PUBLISHER_EMAIL", False):
+        if not Account.is_enable_publisher_email():
             return False
 
         app_source = event.context.get("application")
@@ -32,11 +30,7 @@ def consumes(cls, event):
         if event.context.get("old_status") == constants.APPLICATION_STATUS_REJECTED:
             return False
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         is_update_request = application.application_type == constants.APPLICATION_TYPE_UPDATE_REQUEST
         return is_update_request
 
@@ -44,11 +38,7 @@ def consumes(cls, event):
     def consume(cls, event):
         app_source = event.context.get("application")
 
-        try:
-            application = models.Application(**app_source)
-        except Exception as e:
-            raise exceptions.NoSuchObjectException("Unable to construct Application from supplied source - data structure validation error, {x}".format(x=e))
-
+        application = consumer_utils.parse_application(app_source)
         if not application.owner:
             return
 
@@ -66,7 +56,7 @@ def consume(cls, event):
             date_applied=date_applied,
         )
         notification.short = svc.short_notification(cls.ID).format(
-            issns=", ".join(issn for issn in application.bibjson().issns())
+            issns=application.bibjson().issns_as_text()
         )
 
         # there is no action url associated with this notification
diff --git a/portality/events/consumers/update_request_publisher_submitted_notify.py b/portality/events/consumers/update_request_publisher_submitted_notify.py
new file mode 100644
index 0000000000..6adf9e2f50
--- /dev/null
+++ b/portality/events/consumers/update_request_publisher_submitted_notify.py
@@ -0,0 +1,64 @@
+# ~~UpdateRequestPublisherSubmittedNotify:Consumer~~
+from typing import TypedDict
+
+from portality import constants
+from portality import models
+from portality.bll import DOAJ
+from portality.events import consumer_utils
+from portality.events.consumer import EventConsumer
+from portality.lib import dates
+from portality.models import Account
+from portality.util import url_for
+
+
+class UpdateRequestPublisherSubmittedNotify(EventConsumer):
+    ID = "update_request:publisher:submitted:notify"
+
+    class Context(TypedDict):
+        # no usage, just for developer reference
+        application: dict
+
+    @classmethod
+    def should_consume(cls, event):
+        if event.id != constants.EVENT_APPLICATION_UR_SUBMITTED:
+            return False
+
+        if not Account.is_enable_publisher_email():
+            return False
+
+        app_source = event.context.get("application")
+        if app_source is None:
+            return False
+
+        application = consumer_utils.parse_application(app_source)
+        if application.application_type != constants.APPLICATION_TYPE_UPDATE_REQUEST:
+            return False
+
+        if not application.owner:
+            return False
+
+        return True
+
+    @classmethod
+    def consume(cls, event):
+        application = consumer_utils.parse_application(event.context.get("application"))
+
+        # ~~-> Notifications:Service ~~
+        svc = DOAJ.notificationsService()
+
+        notification = models.Notification()
+        notification.who = application.owner
+        notification.created_by = cls.ID
+        notification.classification = constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE
+
+        notification.long = svc.long_notification(cls.ID).format(
+            application_title=application.bibjson().title,
+            date_applied=dates.human_date(application.date_applied),
+        )
+        notification.short = svc.short_notification(cls.ID).format(
+            issns=application.bibjson().issns_as_text()
+        )
+
+        notification.action = url_for("publisher.updates_in_progress")
+
+        svc.notify(notification)
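The new consumer pairs with the `EVENT_APPLICATION_UR_SUBMITTED` constant added in `portality/constants.py`. A sketch of driving it by hand with a stand-in event (illustrative only — the real `Event` model and events-service dispatch are assumed rather than shown in this diff, and the application source shape is hypothetical):

```python
from types import SimpleNamespace

from portality import constants
from portality.events.consumers.update_request_publisher_submitted_notify import (
    UpdateRequestPublisherSubmittedNotify,
)

# Hypothetical update-request source data, shaped like event.context["application"]
app_source = {"admin": {"application_type": constants.APPLICATION_TYPE_UPDATE_REQUEST}}

# Minimal stand-in for the Event model: the consumer only reads .id and .context
event = SimpleNamespace(id=constants.EVENT_APPLICATION_UR_SUBMITTED,
                        context={"application": app_source})

# In production the events service fans events out to every registered consumer;
# this is the per-consumer contract it relies on
if UpdateRequestPublisherSubmittedNotify.should_consume(event):
    UpdateRequestPublisherSubmittedNotify.consume(event)
```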
diff --git a/portality/events/kafka_consumer.py b/portality/events/kafka_consumer.py
deleted file mode 100644
index 0ce1e1120e..0000000000
--- a/portality/events/kafka_consumer.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import faust
-import json
-
-from portality.app import app as doajapp
-from portality.bll import DOAJ
-from portality.models import Event
-
-broker = doajapp.config.get("KAFKA_BROKER")
-topic_name = doajapp.config.get("KAFKA_EVENTS_TOPIC")
-
-app = faust.App('events', broker=broker, value_serializer='json')
-topic = app.topic(topic_name)
-
-event_counter = 0
-
-
-@app.agent(topic)
-async def handle_event(stream):
-    global event_counter
-    with doajapp.test_request_context("/"):
-        svc = DOAJ.eventsService()
-        async for event in stream:
-            event_counter += 1
-            doajapp.logger.info(f"Kafka event count {event_counter}")
-            # TODO uncomment the following line once the Event model is fixed to Kafka
-            # svc.consume(Event(raw=json.loads(event)))
-
-
-if __name__ == '__main__':
-    app.main()
diff --git a/portality/events/kafka_producer.py b/portality/events/kafka_producer.py
deleted file mode 100644
index 4dfef4aa51..0000000000
--- a/portality/events/kafka_producer.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import json
-from kafka import KafkaProducer
-
-from portality.core import app as doajapp
-
-bootstrap_server = doajapp.config.get("KAFKA_BOOTSTRAP_SERVER")
-
-producer = KafkaProducer(bootstrap_servers=bootstrap_server, value_serializer=lambda v: json.dumps(v).encode('utf-8'))
-
-
-def send_event(event):
-    future = producer.send('events', value=event.serialise())
-    future.get(timeout=60)
\ No newline at end of file
diff --git a/portality/forms/application_forms.py b/portality/forms/application_forms.py
index 3ac5f845b9..ff86584b7b 100644
--- a/portality/forms/application_forms.py
+++ b/portality/forms/application_forms.py
@@ -28,6 +28,7 @@
     DifferentTo,
     RequiredIfOtherValue,
     OnlyIf,
+    OnlyIfExists,
    NotIf,
     GroupMember,
     RequiredValue,
@@ -72,6 +73,7 @@
     "research journal"
 ]
 
+
 ########################################################
 # Define all our individual fields
 ########################################################
@@ -96,18 +98,18 @@ class FieldDefinitions:
         },
         "validate": [
             {"required": {"message": "You must answer Yes to continue"}},
-            {"required_value" : {"value" : "y"}}
+            {"required_value": {"value": "y"}}
         ],
         "contexts": {
-            "admin" : {
-                "validate" : []
+            "admin": {
+                "validate": []
             },
             "editor": {
-                "validate" : [],
+                "validate": [],
                 "disabled": True
             },
             "associate_editor": {
-                "validate" : [],
+                "validate": [],
                 "disabled": True
             }
         }
@@ -135,18 +137,18 @@
         },
         "validate": [
             {"required": {"message": "Enter the URL for the journal’s Open Access statement page"}},
-            "is_url" # ~~^->IsURL:FormValidator~~
+            "is_url"  # ~~^->IsURL:FormValidator~~
         ],
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            "clickable_url" # ~~^-> ClickableURL:FormWidget~~
+            "clickable_url"  # ~~^-> ClickableURL:FormWidget~~
         ],
         "attr": {
             "type": "url"
         }
     }
 
-    #~~->$ Title:FormField~~
+    # ~~->$ Title:FormField~~
     TITLE = {
         "name": "title",
         "label": "Journal title",
@@ -161,11 +163,11 @@
         },
         "validate": [
             {"required": {"message": "Enter the journal’s name"}},
-            "no_script_tag" # ~~^-> NoScriptTag:FormValidator
+            "no_script_tag"  # ~~^-> NoScriptTag:FormValidator
         ],
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            "full_contents" # ~~^->FullContents:FormWidget~~
+            "full_contents"  # ~~^->FullContents:FormWidget~~
         ],
         "contexts": {
             "admin": {
@@ -204,11 +206,12 @@
             "placeholder": "Ma revue"
         },
         "validate": [
-            "no_script_tag" # ~~^-> NoScriptTag:FormValidator
+            "no_script_tag"  # ~~^-> NoScriptTag:FormValidator
         ],
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            {"full_contents" : {"empty_disabled" : "[The journal has no alternative title]"}} # ~~^->FullContents:FormWidget~~
+            {"full_contents": {"empty_disabled": "[The journal has no alternative title]"}}
+            # ~~^->FullContents:FormWidget~~
         ],
         "contexts": {
             "update_request": {
@@ -242,27 +245,27 @@
         "input": "text",
         "validate": [
             "required",
-            "is_url" # ~~^->IsURL:FormValidator~~
+            "is_url"  # ~~^->IsURL:FormValidator~~
         ],
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            "clickable_url" # ~~^-> ClickableURL:FormWidget~~
+            "clickable_url"  # ~~^-> ClickableURL:FormWidget~~
         ],
         "help": {
             "placeholder": "https://www.my-journal.com"
         },
         "contexts": {
-            "public" : {
+            "public": {
                 "validate": [
                     {"required": {"message": "Enter the URL for the journal’s homepage"}},
-                    "is_url", # ~~^->IsURL:FormValidator~~
+                    "is_url",  # ~~^->IsURL:FormValidator~~
                     "journal_url_in_public_doaj"  # ~~^-> JournalURLInPublicDOAJ:FormValidator~~
                 ],
             }
         }
     }
 
-    #~~->$ PISSN:FormField~~
+    # ~~->$ PISSN:FormField~~
     PISSN = {
         "name": "pissn",
         "label": "ISSN (print)",
@@ -278,28 +281,28 @@
         "validate": [
             {"optional_if": {"field": "eissn",  # ~~^-> OptionalIf:FormValidator~~
                              "message": "You must provide one or both of an online ISSN or a print ISSN"}},
-            {"is_issn": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~
+            {"is_issn": {"message": "This is not a valid ISSN"}},  # ~~^-> IsISSN:FormValidator~~
             {"different_to": {"field": "eissn",
                               "message": "This field must contain a different value to 'ISSN ("
-                                         "online)'"}} # ~~^-> DifferetTo:FormValidator~~
+                                         "online)'"}}  # ~~^-> DifferetTo:FormValidator~~
         ],
-        "widgets" : [
+        "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            "full_contents", # ~~^->FullContents:FormWidget~~
-            "issn_link" # ~~^->IssnLink:FormWidget~~
+            "full_contents",  # ~~^->FullContents:FormWidget~~
+            "issn_link"  # ~~^->IssnLink:FormWidget~~
         ],
         "contexts": {
-            "public" : {
+            "public": {
                 "validate": [
                     {"optional_if": {"field": "eissn",  # ~~^-> OptionalIf:FormValidator~~
                                      "message": "You must provide one or both of an online ISSN or a print ISSN"}},
-                    {"is_issn": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~
+                    {"is_issn": {"message": "This is not a valid ISSN"}},  # ~~^-> IsISSN:FormValidator~~
                     {"different_to": {"field": "eissn",
                                       "message": "This field must contain a different value to 'ISSN ("
                                                  "online)'"}},  # ~~^-> DifferetTo:FormValidator~~
                     "issn_in_public_doaj"
                 ],
             },
-            "admin" : {
+            "admin": {
                 "help": {
                     "long_help": ["Must be a valid ISSN, fully registered and confirmed at the "
                                   " ISSN Portal",
@@ -339,7 +342,7 @@
         }
     }
 
-    #~~->$ EISSN:FormField~~
+    # ~~->$ EISSN:FormField~~
     EISSN = {
         "name": "eissn",
         "label": "ISSN (online)",
@@ -355,25 +358,29 @@
         "validate": [
             {"optional_if": {"field": "pissn",  # ~~^-> OptionalIf:FormValidator~~
                              "message": "You must provide one or both of an online ISSN or a print ISSN"}},
-            {"is_issn": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~
-            {"different_to": {"field": "pissn", "message" : "This field must contain a different value to 'ISSN (print)'"}} # ~~^-> DifferetTo:FormValidator~~
+            {"is_issn": {"message": "This is not a valid ISSN"}},  # ~~^-> IsISSN:FormValidator~~
+            {"different_to": {"field": "pissn",
+                              "message": "This field must contain a different value to 'ISSN (print)'"}}
+            # ~~^-> DifferetTo:FormValidator~~
         ],
-        "widgets" : [
+        "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
+            "full_contents",  # ~~^->FullContents:FormWidget~~
             "issn_link"  # ~~^->IssnLink:FormWidget~~
         ],
         "contexts": {
-            "public" : {
-                "validate" : [
+            "public": {
+                "validate": [
                     {"optional_if": {"field": "pissn",  # ~~^-> OptionalIf:FormValidator~~
                                      "message": "You must provide one or both of an online ISSN or a print ISSN"}},
-                    {"is_issn": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~
+                    {"is_issn": {"message": "This is not a valid ISSN"}},  # ~~^-> IsISSN:FormValidator~~
                     {"different_to": {"field": "pissn",
-                                      "message": "This field must contain a different value to 'ISSN (print)'"}}, # ~~^-> DifferetTo:FormValidator~~
+                                      "message": "This field must contain a different value to 'ISSN (print)'"}},
+                    # ~~^-> DifferetTo:FormValidator~~
                     "issn_in_public_doaj"
                 ]
             },
-            "admin" : {
+            "admin": {
                 "help": {
                     "long_help": ["Must be a valid ISSN, fully registered and confirmed at the "
                                   " ISSN Portal",
@@ -409,11 +416,11 @@
         },
         "update_request": {
             "disabled": True,
-            "validate" : [
+            "validate": [
                 {"optional_if": {"field": "pissn",  # ~~^-> OptionalIf:FormValidator~~
                                  "message": "You must provide one or both of an online ISSN or a print ISSN"}},
-                {"is_issn": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~
-                {"different_to": {"field": "pissn", # ~~^-> DifferetTo:FormValidator~~
+                {"is_issn": {"message": "This is not a valid ISSN"}},  # ~~^-> IsISSN:FormValidator~~
+                {"different_to": {"field": "pissn",  # ~~^-> DifferetTo:FormValidator~~
                                   "message": "This field must contain a different value to 'ISSN (print)'"}}
             ]
         }
@@ -428,12 +435,13 @@
         "help": {
             "long_help": ["Choose up to 6 keywords that describe the journal's subject matter. "
                           "Keywords must be in English.", "Use single words or short phrases (2 to 3 words) "
-                          "that describe the journal's main topic.", "Do not add acronyms, abbreviations or descriptive sentences.",
-                          "Note that the keywords may be edited by DOAJ editorial staff." ],
+                          "that describe the journal's main topic.",
+                          "Do not add acronyms, abbreviations or descriptive sentences.",
+                          "Note that the keywords may be edited by DOAJ editorial staff."],
         },
         "validate": [
             {"required": {"message": "Enter at least one subject keyword in English"}},
-            {"stop_words": {"disallowed": STOP_WORDS}}, # ~~^->StopWords:FormValidator~~
+            {"stop_words": {"disallowed": STOP_WORDS}},  # ~~^->StopWords:FormValidator~~
             {"max_tags": {"max": 6}}
         ],
         "widgets": [
@@ -455,10 +463,10 @@
         "name": "language",
         "label": "Languages in which the journal accepts manuscripts",
         "input": "select",
-        "default" : "",
+        "default": "",
         "options_fn": "iso_language_list",
         "repeatable": {
-            "minimum" : 1,
+            "minimum": 1,
             "initial": 5
         },
         "validate": [
@@ -487,34 +495,56 @@
         ],
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            {"autocomplete": {"type" : "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~
-            "full_contents" # ~~^->FullContents:FormWidget~~
+            {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}},
+            # ~~^-> Autocomplete:FormWidget~~
+            "full_contents"  # ~~^->FullContents:FormWidget~~
         ],
         "help": {
             "placeholder": "Type or select the publisher's name"
         },
-        "contexts" : {
-            "bulk_edit" : {
-                "validate" : []
+        "contexts": {
+            "bulk_edit": {
+                "validate": []
             },
             "admin": {
                 "widgets": [
                     "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~
+                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}},
+                    # ~~^-> Autocomplete:FormWidget~~
                     "click_to_copy",  # ~~^-> ClickToCopy:FormWidget~~
                 ]
             },
+            "public": {
+                "validate": [
+                    {"required": {"message": "Enter the name of the journal's publisher"}},
+                    {"different_to": {"field": "institution_name",
+                                      "message": "The Publisher's name and Other organisation's name cannot be the same."}}
+                ]
+                # ~~^-> DifferetTo:FormValidator~~
+
+            },
+            "update_request": {
+                "validate": [
+                    {"required": {"message": "Enter the name of the journal's publisher"}},
+                    {"different_to": {"field": "institution_name",
+                                      "message": "The Publisher's name and Other organisation's name cannot be the same."}}
+                ]
+                # ~~^-> DifferetTo:FormValidator~~
+
+            },
             "associate_editor": {
                 "widgets": [
                     "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~
+                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}},
+                    # ~~^-> Autocomplete:FormWidget~~
                    "click_to_copy",  # ~~^-> ClickToCopy:FormWidget~~
                 ]
            },
            "editor": {
                "widgets": [
                    "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~
+                    {"autocomplete": {"type": "journal", "field": "bibjson.publisher.name.exact"}},
+                    # ~~^-> Autocomplete:FormWidget~~
                    "click_to_copy",  # ~~^-> ClickToCopy:FormWidget~~
                ]
            }
@@ -534,7 +564,8 @@
             "placeholder": "Type or select the country"
         },
         "validate": [
-            {"required": {"message": "Enter the country where the publisher carries out its business operations and is registered"}}
+            {"required": {
+                "message": "Enter the country where the publisher carries out its business operations and is registered"}}
         ],
         "widgets": [
             {"select": {}}
@@ -546,8 +577,8 @@
             "associate_editor": {
                 "disabled": True
             },
-            "bulk_edit" : {
-                "validate" : []
+            "bulk_edit": {
+                "validate": []
             }
         }
     }
@@ -560,17 +591,31 @@
         "optional": True,
         "help": {
             "short_help": "Any other organisation associated with the journal",
-            "long_help": ["The journal may be owned, funded, sponsored, or supported by another organisation that is not "
-                          "the publisher. If your journal is linked to "
-                          "a second organisation, enter its name here."],
+            "long_help": [
+                "The journal may be owned, funded, sponsored, or supported by another organisation that is not "
+                "the publisher. If your journal is linked to "
+                "a second organisation, enter its name here."],
             "placeholder": "Type or select the other organisation's name"
         },
         "widgets": [
             "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
-            {"autocomplete": {"type" : "journal", "field": "bibjson.institution.name.exact"}}, # ~~^-> Autocomplete:FormWidget~~
-            "full_contents" # ~~^->FullContents:FormWidget~~
+            {"autocomplete": {"type": "journal", "field": "bibjson.institution.name.exact"}},
+            # ~~^-> Autocomplete:FormWidget~~
+            "full_contents"  # ~~^->FullContents:FormWidget~~
         ],
         "contexts": {
+            "public": {
+                "validate": [{"different_to": {"field": "publisher_name",
+                                               "message": "The Publisher's name and Other organisation's name cannot be the same."}}]
+                # ~~^-> DifferetTo:FormValidator~~
+
+            },
+            "update_request": {
+                "validate": [{"different_to": {"field": "publisher_name",
+                                               "message": "The Publisher's name and Other organisation's name cannot be the same."}}]
+                # ~~^-> DifferetTo:FormValidator~~
+
+            },
             "admin": {
                 "widgets": [
                     "trim_whitespace",  # ~~^-> TrimWhitespace:FormWidget~~
@@ -603,7 +648,7 @@
         "name": "institution_country",
         "label": "Other organisation's country",
         "input": "select",
-        "default" : "",
+        "default": "",
         "options_fn": "iso_country_list",
         "optional": True,
         "help": {
@@ -611,8 +656,32 @@
             "placeholder": "Type or select the country"
         },
         "widgets": [
-            {"select": {"allow_clear" : True}}
+            {"select": {"allow_clear": True}}
         ],
+        "contexts": {
+            "public": {
+                "validate": [
+                    {
+                        "only_if_exists": {
+                            "fields":
+                                [{"field": "institution_name"}],
+                            "message": "You must provide the other organization's name. You cannot provide just the country.",
+                        }
+                    }
+                ]
+            },
+            "update_request": {
+                "validate": [
+                    {
+                        "only_if_exists": {
+                            "fields":
+                                [{"field": "institution_name"}],
+                            "message": "You must provide the other organization's name. You cannot provide just the country.",
+                        }
+                    }
+                ]
+            },
+        },
         "attr": {
             "class": "input-xlarge"
         }
     }
", @@ -693,7 +763,7 @@ class FieldDefinitions: "diff_table_context": "License terms", "validate": [ {"required": {"message": "Enter the URL for the journal’s license terms page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "help": { "short_help": "Link to the page where the license terms are stated on your site.", @@ -702,7 +772,7 @@ class FieldDefinitions: }, "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -727,7 +797,7 @@ class FieldDefinitions: ] } - #~~->$ LicenseDisplayExampleUrl:FormField~~ + # ~~->$ LicenseDisplayExampleUrl:FormField~~ LICENSE_DISPLAY_EXAMPLE_URL = { "name": "license_display_example_url", "label": "Recent article displaying or embedding a license in the full text", @@ -746,11 +816,11 @@ class FieldDefinitions: "message": "Enter the URL for any recent article that displays or embeds a license" } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -789,23 +859,23 @@ class FieldDefinitions: }, "placeholder": "https://www.my-journal.com/about#licensing", "validate": [ - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ], "contexts": { "public": { "validate": [ {"required": {"message": "Enter the URL for the journal’s copyright terms page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ] }, "update_request": { "validate": [ "required", - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ] } } @@ -855,7 +925,7 @@ class FieldDefinitions: } } ], - "widgets" : [ + "widgets": [ "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ ], "asynchronous_warning": [ @@ -875,11 +945,11 @@ class FieldDefinitions: }, "validate": [ {"required": {"message": "Enter the URL for the journal’s peer review policy page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -890,14 +960,17 @@ class FieldDefinitions: "input": "number", "datatype": "integer", "help": { - "long_help": ["Please enter the year that the journal started to publish all content as true open access, according to DOAJ's definition.", - "For journals that have flipped to open access, enter the year that the journal flipped, not the original launch date of the journal.", - "For journals that have made digitised backfiles freely available, enter the year that the journal started publishing as a fully open access title, not the date of the earliest free content."] + "long_help": [ + "Please enter the year that the journal started to publish all content as true open access, according to DOAJ's definition.", + "For journals that have flipped to open access, enter the year that the journal flipped, not the original launch date of the journal.", + "For journals that have made digitised backfiles freely available, enter the year that the journal started publishing as a fully open 
access title, not the date of the earliest free content."] }, "validate": [ {"required": {"message": "Enter the Year (YYYY)."}}, {"int_range": {"gte": app.config.get('MINIMAL_OA_START_DATE', 1900), "lte": dates.now().year}}, - {"year": {"message": "OA Start Date must be a year in a 4 digit format (eg. 1987) and must be greater than {}".format(app.config.get('MINIMAL_OA_START_DATE', 1900))}} + {"year": { + "message": "OA Start Date must be a year in a 4 digit format (eg. 1987) and must be greater than {}".format( + app.config.get('MINIMAL_OA_START_DATE', 1900))}} ], "attr": { "min": app.config.get('MINIMAL_OA_START_DATE', 1900), @@ -925,7 +998,7 @@ class FieldDefinitions: ] } - #~~->$ PlagiarismURL:FormField~~ + # ~~->$ PlagiarismURL:FormField~~ PLAGIARISM_URL = { "name": "plagiarism_url", "label": "Where can we find this information?", @@ -946,11 +1019,11 @@ class FieldDefinitions: "message": "Enter the URL for the journal’s plagiarism policy page" } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -965,11 +1038,11 @@ class FieldDefinitions: }, "validate": [ {"required": {"message": "Enter the URL for the journal’s Aims & Scope page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -984,11 +1057,11 @@ class FieldDefinitions: }, "validate": [ {"required": {"message": "Enter the URL for the journal’s Editorial Board page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -1003,11 +1076,11 @@ class FieldDefinitions: }, "validate": [ {"required": {"message": "Enter the URL for the journal’s Instructions for Authors page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -1116,7 +1189,7 @@ class FieldDefinitions: "help": { "placeholder": "Highest fee charged" }, - "validate":[ + "validate": [ { "required_if": { "field": "apc", @@ -1144,12 +1217,13 @@ class FieldDefinitions: "placeholder": "https://www.my-journal.com/about#apc" }, "validate": [ - {"required": {"message": "Enter the URL for the journal’s publication fees information page"}}, - "is_url" # ~~^->IsURL:FormValidator~~ + {"required": { + "message": "Enter the URL for the journal’s publication fees information page"}}, + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -1197,11 +1271,11 @@ class FieldDefinitions: "message": "Enter the URL for the journal’s waiver information page" } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> 
ClickableURL:FormWidget~~ ] } @@ -1245,11 +1319,11 @@ class FieldDefinitions: "message": "Enter the URL for the journal’s fees information page" } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -1267,8 +1341,10 @@ class FieldDefinitions: {"display": "PKP PN", "value": "PKP PN", "subfields": ["preservation_service_url"]}, {"display": "PubMed Central (PMC)", "value": "PMC", "subfields": ["preservation_service_url"]}, {"display": "Portico", "value": "Portico", "subfields": ["preservation_service_url"]}, - {"display": "A national library", "value": "national_library", "subfields": ["preservation_service_library", "preservation_service_url"]}, - {"display": "Other", "value": "other", "subfields": ["preservation_service_other", "preservation_service_url"]}, + {"display": "A national library", "value": "national_library", + "subfields": ["preservation_service_library", "preservation_service_url"]}, + {"display": "Other", "value": "other", + "subfields": ["preservation_service_other", "preservation_service_url"]}, {"display": "The journal content isn’t archived with a long-term preservation service", "value": "none", "exclusive": True} ], @@ -1296,9 +1372,9 @@ class FieldDefinitions: "name": "preservation_service_library", "label": "A national library", "input": "text", - "repeatable" : { + "repeatable": { "minimum": 1, - "initial" : 2 + "initial": 2 }, "help": { "short_help": "Name of national library" @@ -1335,8 +1411,11 @@ class FieldDefinitions: } } ], - "widgets" : [ - "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ + "asynchronous_warning": [ + {"warn_on_value": {"value": "None"}} + ], + "widgets": [ + "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ ] } @@ -1379,11 +1458,11 @@ class FieldDefinitions: ] } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url" # ~~^-> ClickableURL:FormWidget~~ + "clickable_url" # ~~^-> ClickableURL:FormWidget~~ ] } @@ -1399,7 +1478,8 @@ class FieldDefinitions: {"display": "Dulcinea", "value": "Dulcinea", "subfields": ["deposit_policy_url"]}, {"display": "Mir@bel", "value": "Mir@bel", "subfields": ["deposit_policy_url"]}, {"display": "Sherpa/Romeo", "value": "Sherpa/Romeo", "subfields": ["deposit_policy_url"]}, - {"display": "Other (including publisher’s own site)", "value": "other", "subfields": ["deposit_policy_other", "deposit_policy_url"]}, + {"display": "Other (including publisher’s own site)", "value": "other", + "subfields": ["deposit_policy_other", "deposit_policy_url"]}, {"display": "The journal has no repository policy", "value": "none", "exclusive": True} ], "help": { @@ -1432,8 +1512,11 @@ class FieldDefinitions: } } ], - "widgets" : [ - "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ + "asynchronous_warning": [ + {"warn_on_value": {"value": "None"}} + ], + "widgets": [ + "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ ] } @@ -1454,14 +1537,14 @@ class FieldDefinitions: "placeholder": "https://www.my-journal.com/about#repository_policy" }, "validate": [ - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ], "widgets": [ "trim_whitespace", # ~~^-> TrimWhitespace:FormWidget~~ - "clickable_url", # ~~^-> ClickableURL:FormWidget~~ + "clickable_url", # ~~^-> 
ClickableURL:FormWidget~~ ], - "contexts" : { - "public" : { + "contexts": { + "public": { "validate": [ { "required_if": { @@ -1475,11 +1558,11 @@ class FieldDefinitions: ] } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ] }, - "update_request" : { - "validate" : [ + "update_request": { + "validate": [ { "required_if": { "field": "deposit_policy", @@ -1492,7 +1575,7 @@ class FieldDefinitions: ] } }, - "is_url" # ~~^->IsURL:FormValidator~~ + "is_url" # ~~^->IsURL:FormValidator~~ ] } } @@ -1511,7 +1594,8 @@ class FieldDefinitions: {"display": "Handles", "value": "Handles"}, {"display": "PURLs", "value": "PURL"}, {"display": "Other", "value": "other", "subfields": ["persistent_identifiers_other"]}, - {"display": "The journal does not use persistent article identifiers", "value": "none", "exclusive": True} + {"display": "The journal does not use persistent article identifiers", "value": "none", + "exclusive": True} ], "help": { "long_help": ["A persistent article identifier (PID) is used to find the article no matter where it is " @@ -1537,8 +1621,11 @@ class FieldDefinitions: } } ], - "widgets" : [ - "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ + "asynchronous_warning": [ + {"warn_on_value": {"value": "None"}} + ], + "widgets": [ + "trim_whitespace" # ~~^-> TrimWhitespace:FormWidget~~ ] } @@ -1551,19 +1638,20 @@ class FieldDefinitions: {"display": "Yes", "value": "y"}, {"display": "No", "value": "n"} ], - "default" : "", + "default": "", "help": { - "long_help": ["An ORCID (Open Researcher and Contributor) iD is an alphanumeric code to uniquely identify " - "authors."], + "long_help": [ + "An ORCID (Open Researcher and Contributor) iD is an alphanumeric code to uniquely identify " + "authors."], }, - "contexts" : { - "public" : { + "contexts": { + "public": { "validate": [ {"required": {"message": "Select Yes or No"}} ] }, - "update_request" : { - "validate" : [ + "update_request": { + "validate": [ {"required": {"message": "Select Yes or No"}} ] } @@ -1579,18 +1667,19 @@ class FieldDefinitions: {"display": "Yes", "value": "y"}, {"display": "No", "value": "n"} ], - "default" : "", + "default": "", "help": { - "long_help": ["The I4OC standards ask that citations are structured, separable, and open. "], + "long_help": [ + "The I4OC standards ask that citations are structured, separable, and open. 
"], }, - "contexts" : { - "public" : { + "contexts": { + "public": { "validate": [ {"required": {"message": "Select Yes or No"}} ] }, - "update_request" : { - "validate" : [ + "update_request": { + "validate": [ {"required": {"message": "Select Yes or No"}} ] } @@ -1607,23 +1696,23 @@ class FieldDefinitions: "input": "checkbox", "validate": [ { - "only_if" : { - "fields" : [ - {"field" : "license_display", "value" : "y"}, - {"field" : "copyright_author_retains", "value" : "y"}, - {"field" : "preservation_service", "not" : "none"}, - {"field" : "preservation_service_url", "not" : ""}, - {"field" : "deposit_policy", "not" : "none"}, - {"field" : "persistent_identifiers", "not" : "none"}, - {"field" : "license", "or" : ["CC BY", "CC BY-SA", "CC BY-NC", "CC BY-NC-SA"]} + "only_if": { + "fields": [ + {"field": "license_display", "value": "y"}, + {"field": "copyright_author_retains", "value": "y"}, + {"field": "preservation_service", "not": "none"}, + {"field": "preservation_service_url", "not": ""}, + {"field": "deposit_policy", "not": "none"}, + {"field": "persistent_identifiers", "not": "none"}, + {"field": "license", "or": ["CC BY", "CC BY-SA", "CC BY-NC", "CC BY-NC-SA"]} ], - "message" : "In order to award the query: the license must be CC BY, CC BY-SA, CC BY-NC, or CC BY-NC-SA; " - "the license must be displayed or embedded; " - "the author must retain their copyright; " - "the journal must make use of a preservation service; " - "a url for the preservation service must be provided; " - "the journal must have a deposit policy; " - "the journal must use a persistent identifier" + "message": "In order to award the query: the license must be CC BY, CC BY-SA, CC BY-NC, or CC BY-NC-SA; " + "the license must be displayed or embedded; " + "the author must retain their copyright; " + "the journal must make use of a preservation service; " + "a url for the preservation service must be provided; " + "the journal must have a deposit policy; " + "the journal must use a persistent identifier" } } ] @@ -1662,12 +1751,12 @@ class FieldDefinitions: "owner_exists" ], "widgets": [ - {"autocomplete": {"type" : "account", "field": "id", "include" : False}}, # ~~^-> Autocomplete:FormWidget~~ + {"autocomplete": {"type": "account", "field": "id", "include": False}}, # ~~^-> Autocomplete:FormWidget~~ "clickable_owner" ], - "contexts" : { - "associate_editor" : { - "validate" : [ + "contexts": { + "associate_editor": { + "validate": [ {"required": {"message": "You must confirm the account id"}}, "reserved_usernames", "owner_exists" @@ -1685,30 +1774,30 @@ class FieldDefinitions: "validate": [ "required" ], - "help" : { - "update_requests_diff" : False, + "help": { + "update_requests_diff": False, "render_error_box": False }, - "disabled" : "application_status_disabled", - "contexts" : { - "associate_editor" : { - "help" : { + "disabled": "application_status_disabled", + "contexts": { + "associate_editor": { + "help": { "render_error_box": False, - "short_help" : "Set the status to 'In Progress' to signal to the applicant that you have started your review." - "Set the status to 'Completed' to alert the Editor that you have completed your review.", + "short_help": "Set the status to 'In Progress' to signal to the applicant that you have started your review." 
+ "Set the status to 'Completed' to alert the Editor that you have completed your review.", "update_requests_diff": False } }, - "editor" : { - "help" : { - "render_error_box" : False, - "short_help" : "Revert the status to 'In Progress' to signal to the Associate Editor that further work is needed." - "Set the status to 'Ready' to alert the Managing Editor that you have completed your review.", + "editor": { + "help": { + "render_error_box": False, + "short_help": "Revert the status to 'In Progress' to signal to the Associate Editor that further work is needed." + "Set the status to 'Ready' to alert the Managing Editor that you have completed your review.", "update_requests_diff": False } } }, - "widgets" : [ + "widgets": [ # When Accepted selected display. 'This journal is currently assigned to its applicant account XXXXXX. Is this the correct account for this journal?' "owner_review" ] @@ -1720,16 +1809,18 @@ class FieldDefinitions: "label": "Group", "input": "text", "widgets": [ - {"autocomplete": {"type" : "editor_group", "field": "name", "include" : False}} # ~~^-> Autocomplete:FormWidget~~ + {"autocomplete": {"type": "editor_group", "field": "name", "include": False}} + # ~~^-> Autocomplete:FormWidget~~ ], - "contexts" : { - "editor" : { - "disabled" : True + "contexts": { + "editor": { + "disabled": True }, - "admin" : { - "widgets" : [ - {"autocomplete": {"type": "editor_group", "field": "name", "include" : False}}, # ~~^-> Autocomplete:FormWidget~~ - {"load_editors" : {"field" : "editor"}} + "admin": { + "widgets": [ + {"autocomplete": {"type": "editor_group", "field": "name", "include": False}}, + # ~~^-> Autocomplete:FormWidget~~ + {"load_editors": {"field": "editor"}} ] } } @@ -1741,11 +1832,11 @@ class FieldDefinitions: "label": "Individual", "input": "select", "options_fn": "editor_choices", - "default" : "", - "validate" : [ - { "group_member" : {"group_field" : "editor_group"}} + "default": "", + "validate": [ + {"group_member": {"group_field": "editor_group"}} ], - "help" : { + "help": { "render_error_box": False } } @@ -1755,22 +1846,22 @@ class FieldDefinitions: "name": "discontinued_date", "label": "Discontinued on", "input": "text", - "validate" : [ - {"bigenddate" : {"message" : "Date must be a big-end formatted date (e.g. 2020-11-23)"}}, + "validate": [ + {"bigenddate": {"message": "Date must be a big-end formatted date (e.g. 2020-11-23)"}}, { - "not_if" : { - "fields" : [ - {"field" : "continues"}, - {"field" : "continued_by"} + "not_if": { + "fields": [ + {"field": "continues"}, + {"field": "continued_by"} ], - "message" : "You cannot enter both a discontinued date and continuation information." + "message": "You cannot enter both a discontinued date and continuation information." } } ], - "help" : { - "short_help" : "Please enter the discontinued date in the form YYYY-MM-DD (e.g. 2020-11-23). " - "If the day of the month is not known, please use '01' (e.g. 2020-11-01)", - "render_error_box" : False + "help": { + "short_help": "Please enter the discontinued date in the form YYYY-MM-DD (e.g. 2020-11-23). " + "If the day of the month is not known, please use '01' (e.g. 2020-11-01)", + "render_error_box": False } } @@ -1781,11 +1872,13 @@ class FieldDefinitions: "input": "taglist", "validate": [ {"is_issn_list": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~ - {"different_to": {"field": "continued_by", "message": "The ISSN provided in both fields must be different. 
Please make sure to enter the ISSN of an older journal for the first field and the ISSN of a newer journal for the second field. They cannot be the same."}}, # ~~^-> DifferetTo:FormValidator~~ + {"different_to": {"field": "continued_by", + "message": "The ISSN provided in both fields must be different. Please make sure to enter the ISSN of an older journal for the first field and the ISSN of a newer journal for the second field. They cannot be the same."}}, + # ~~^-> DifferetTo:FormValidator~~ { - "not_if" : { - "fields" : [{"field" : "discontinued_date"}], - "message" : "You cannot enter both continuation information and a discontinued date" + "not_if": { + "fields": [{"field": "discontinued_date"}], + "message": "You cannot enter both continuation information and a discontinued date" } } ], @@ -1794,7 +1887,7 @@ class FieldDefinitions: "full_contents", # ~~^->FullContents:FormWidget~~ "tagentry" # ~~-> TagEntry:FormWidget~~ ], - "help" : { + "help": { "render_error_box": False } } @@ -1806,7 +1899,9 @@ class FieldDefinitions: "input": "taglist", "validate": [ {"is_issn_list": {"message": "This is not a valid ISSN"}}, # ~~^-> IsISSN:FormValidator~~ - {"different_to": {"field": "continues", "message": "The ISSN provided in both fields must be different. Please make sure to enter the ISSN of an older journal for the first field and the ISSN of a newer journal for the second field. They cannot be the same."}}, # ~~^-> DifferetTo:FormValidator~~ + {"different_to": {"field": "continues", + "message": "The ISSN provided in both fields must be different. Please make sure to enter the ISSN of an older journal for the first field and the ISSN of a newer journal for the second field. They cannot be the same."}}, + # ~~^-> DifferetTo:FormValidator~~ { "not_if": { "fields": [{"field": "discontinued_date"}], @@ -1814,7 +1909,7 @@ class FieldDefinitions: } } ], - "help" : { + "help": { "render_error_box": False }, "widgets": [ @@ -1831,17 +1926,17 @@ class FieldDefinitions: "input": "taglist", "help": { "short_help": "Selecting a subject will not automatically select its sub-categories", - "render_error_box" : False, + "render_error_box": False, }, "validate": [ - {"required_if" : { - "field" : "application_status", - "value" : [ + {"required_if": { + "field": "application_status", + "value": [ constants.APPLICATION_STATUS_READY, constants.APPLICATION_STATUS_COMPLETED, constants.APPLICATION_STATUS_ACCEPTED ], - "message" : "This field is required when setting the Application Status to {y}, {z} or {a}".format( + "message": "This field is required when setting the Application Status to {y}, {z} or {a}".format( y=constants.APPLICATION_STATUS_READY, z=constants.APPLICATION_STATUS_COMPLETED, a=constants.APPLICATION_STATUS_ACCEPTED @@ -1852,9 +1947,9 @@ class FieldDefinitions: "widgets": [ "subject_tree" ], - "contexts" : { - "associate_editor" : { - "validate" : [ + "contexts": { + "associate_editor": { + "validate": [ "required" ] } @@ -1863,12 +1958,12 @@ class FieldDefinitions: # ~~->$ Notes:FormField~~ NOTES = { - "name" : "notes", + "name": "notes", "input": "group", "label": "Notes", - "repeatable" : { - "initial" : 1, - "add_button_placement" : "top" + "repeatable": { + "initial": 1, + "add_button_placement": "top" }, "subfields": [ "note_author", @@ -1880,10 +1975,10 @@ class FieldDefinitions: "template": "application_form/_list.html", "entry_template": "application_form/_entry_group.html", "widgets": [ - {"infinite_repeat" : {"enable_on_repeat" : ["textarea"]}}, + {"infinite_repeat": 
{"enable_on_repeat": ["textarea"]}}, "note_modal" ], - "merge_disabled" : "merge_disabled_notes", + "merge_disabled": "merge_disabled_notes", } # ~~->$ Note:FormField~~ @@ -1907,7 +2002,7 @@ class FieldDefinitions: # ~~->$ NoteDate:FormField~~ NOTE_DATE = { "subfield": True, - "name" : "note_date", + "name": "note_date", "group": "notes", "input": "text", "disabled": True @@ -1915,7 +2010,7 @@ class FieldDefinitions: # ~~->$ NoteID:FormField~~ NOTE_ID = { - "subfield" : True, + "subfield": True, "name": "note_id", "group": "notes", "input": "hidden" @@ -1923,7 +2018,7 @@ class FieldDefinitions: # ~~->$ NoteAuthorID:FormField~~ NOTE_AUTHOR_ID = { - "subfield" : True, + "subfield": True, "name": "note_author_id", "group": "notes", "input": "hidden" @@ -1931,10 +2026,10 @@ class FieldDefinitions: # ~~->$ OptionalValidation:FormField~~ OPTIONAL_VALIDATION = { - "name" : "make_all_fields_optional", - "label" : "Allow save without validation", - "input" : "checkbox", - "widget" : { + "name": "make_all_fields_optional", + "label": "Allow save without validation", + "input": "checkbox", + "widget": { "optional_validation" } } @@ -1945,11 +2040,11 @@ class FieldDefinitions: "name": "change_doaj_seal", "label": 'Award the Seal', "input": "select", - "default" : "", - "options" :[ - {"value": "", "display" : "Leave unchanged"}, - {"value" : "True", "display" : "Yes"}, - {"value" : "False", "display" : "No"} + "default": "", + "options": [ + {"value": "", "display": "Leave unchanged"}, + {"value": "True", "display": "Yes"}, + {"value": "False", "display": "No"} ], } @@ -1959,7 +2054,7 @@ class FieldDefinitions: ########################################################## class FieldSetDefinitions: - #~~->$ BasicCompliance:FieldSet~~ + # ~~->$ BasicCompliance:FieldSet~~ BASIC_COMPLIANCE = { "name": "basic_compliance", "label": "Open access compliance", @@ -2224,7 +2319,7 @@ class FieldSetDefinitions: OPTIONAL_VALIDATION = { "name": "optional_validation", "label": "Allow save without validation", - "fields" : [ + "fields": [ FieldDefinitions.OPTIONAL_VALIDATION["name"] ] } @@ -2232,9 +2327,9 @@ class FieldSetDefinitions: # ~~->$ BulkEdit:FieldSet~~ # ~~^-> BulkEdit:Feature~~ BULK_EDIT = { - "name" : "bulk_edit", - "label" : "Bulk edit", - "fields" : [ + "name": "bulk_edit", + "label": "Bulk edit", + "fields": [ FieldDefinitions.PUBLISHER_NAME["name"], FieldDefinitions.BULK_DOAJ_SEAL["name"], FieldDefinitions.PUBLISHER_COUNTRY["name"], @@ -2248,9 +2343,9 @@ class FieldSetDefinitions: ########################################################### class ApplicationContextDefinitions: - #~~->$ NewApplication:FormContext~~ - #~~^-> ApplicationForm:Crosswalk~~ - #~~^-> NewApplication:FormProcessor~~ + # ~~->$ NewApplication:FormContext~~ + # ~~^-> ApplicationForm:Crosswalk~~ + # ~~^-> NewApplication:FormProcessor~~ PUBLIC = { "name": "public", "fieldsets": [ @@ -2272,9 +2367,9 @@ class ApplicationContextDefinitions: FieldSetDefinitions.UNIQUE_IDENTIFIERS["name"] ], "templates": { - "form" : "application_form/public_application.html", - "default_field" : "application_form/_field.html", - "default_group" : "application_form/_group.html" + "form": "application_form/public_application.html", + "default_field": "application_form/_field.html", + "default_group": "application_form/_group.html" }, "crosswalks": { "obj2form": ApplicationFormXWalk.obj2form, @@ -2369,9 +2464,9 @@ class JournalContextDefinitions: FieldSetDefinitions.UNIQUE_IDENTIFIERS["name"] ], "templates": { - "form" : 
"application_form/readonly_journal.html", - "default_field" : "application_form/_field.html", - "default_group" : "application_form/_group.html" + "form": "application_form/readonly_journal.html", + "default_field": "application_form/_field.html", + "default_group": "application_form/_group.html" }, "crosswalks": { "obj2form": JournalFormXWalk.obj2form, @@ -2421,14 +2516,14 @@ class JournalContextDefinitions: # ~~^-> JournalForm:Crosswalk~~ # ~~^-> ManEdJournal:FormProcessor~~ BULK_EDIT = { - "name" : "bulk_edit", - "fieldsets" : [ + "name": "bulk_edit", + "fieldsets": [ FieldSetDefinitions.BULK_EDIT["name"] ], "templates": { - "form" : "application_form/maned_journal_bulk_edit.html", - "default_field" : "application_form/_field.html", - "default_group" : "application_form/_group.html" + "form": "application_form/maned_journal_bulk_edit.html", + "default_field": "application_form/_field.html", + "default_group": "application_form/_group.html" }, "crosswalks": { "obj2form": JournalFormXWalk.obj2form, @@ -2437,6 +2532,7 @@ class JournalContextDefinitions: "processor": application_processors.ManEdBulkEdit } + ####################################################### # Gather all of our form information in one place ####################################################### @@ -2454,7 +2550,6 @@ class JournalContextDefinitions: "fields": {v['name']: v for k, v in FieldDefinitions.__dict__.items() if not k.startswith('_')} } - JOURNAL_FORMS = { "contexts": { JournalContextDefinitions.READ_ONLY["name"]: JournalContextDefinitions.READ_ONLY, @@ -2473,8 +2568,8 @@ class JournalContextDefinitions: ####################################################### def iso_country_list(field, formualic_context_name): - #~~-> Countries:Data~~ - cl = [{"display" : " ", "value" : ""}] + # ~~-> Countries:Data~~ + cl = [{"display": " ", "value": ""}] for v, d in country_options: cl.append({"display": d, "value": v}) return cl @@ -2482,7 +2577,7 @@ def iso_country_list(field, formualic_context_name): def iso_language_list(field, formulaic_context_name): # ~~-> Languages:Data~~ - cl = [{"display" : " ", "value" : ""}] + cl = [{"display": " ", "value": ""}] for v, d in language_options: cl.append({"display": d, "value": v}) return cl @@ -2490,7 +2585,7 @@ def iso_language_list(field, formulaic_context_name): def iso_currency_list(field, formulaic_context_name): # ~~-> Currencies:Data~~ - cl = [{"display" : " ", "value" : ""}] + cl = [{"display": " ", "value": ""}] quick_pick = [] for v, d in currency_options: if v in ["GBP", "USD", "EUR"]: @@ -2503,7 +2598,8 @@ def iso_currency_list(field, formulaic_context_name): def quick_reject(field, formulaic_context_name): # ~~-> QuickReject:Feature~~ - return [{"display": "Other", "value" : ""}] + [{'display': v, 'value': v} for v in app.config.get('QUICK_REJECT_REASONS', [])] + return [{"display": "Other", "value": ""}] + [{'display': v, 'value': v} for v in + app.config.get('QUICK_REJECT_REASONS', [])] def application_statuses(field, formulaic_context): @@ -2559,20 +2655,22 @@ def editor_choices(field, formulaic_context): egf = formulaic_context.get("editor_group") wtf = egf.wtfield if wtf is None: - return [{"display" : "", "value" : ""}] + return [{"display": "", "value": ""}] editor_group_name = wtf.data if editor_group_name is None: - return [{"display" : "", "value" : ""}] + return [{"display": "", "value": ""}] else: eg = EditorGroup.pull_by_key("name", editor_group_name) if eg is not None: editors = [eg.editor] editors += eg.associates editors = list(set(editors)) - return 
[{"value" : "", "display" : "No editor assigned"}] + [{"value" : editor, "display" : editor} for editor in editors] + return [{"value": "", "display": "No editor assigned"}] + [{"value": editor, "display": editor} for editor + in editors] else: - return [{"display" : "", "value" : ""}] + return [{"display": "", "value": ""}] + ####################################################### ## Conditional disableds @@ -2603,6 +2701,7 @@ def disable_edit_note_except_editing_user(field: FormulaicField, return True return cur_user_id != form_field.data.get('note_author_id') + ####################################################### ## Merge disabled ####################################################### @@ -2633,6 +2732,7 @@ def merge_disabled_notes(notes_group, original_form): for m in merged: wtf.append_entry(m) + ####################################################### # Validation features ####################################################### @@ -2641,6 +2741,7 @@ class ReservedUsernamesBuilder: """ ~~->$ ReservedUsernames:FormValidator~~ """ + @staticmethod def render(settings, html_attrs): return @@ -2654,6 +2755,7 @@ class OwnerExistsBuilder: """ ~~->$ OwnerExists:FormValidator~~ """ + @staticmethod def render(settings, html_attrs): return @@ -2667,6 +2769,7 @@ class RequiredBuilder: """ ~~->$ Required:FormValidator~~ """ + @staticmethod def render(settings, html_attrs): html_attrs["required"] = "" @@ -2702,13 +2805,15 @@ class IntRangeBuilder: ~~->$ IntRange:FormValidator~~ ~~^-> NumberRange:FormValidator~~ """ + @staticmethod def render(settings, html_attrs): html_attrs["data-parsley-type"] = "digits" default_msg = "" if "gte" in settings and "lte" in settings: html_attrs["data-parsley-range"] = "[" + str(settings.get("gte")) + ", " + str(settings.get("lte")) + "]" - default_msg = "This value should be between " + str(settings.get("gte")) + " and " + str(settings.get("lte")) + default_msg = "This value should be between " + str(settings.get("gte")) + " and " + str( + settings.get("lte")) else: if "gte" in settings: html_attrs["data-parsley-min"] = settings.get("gte") @@ -2734,10 +2839,12 @@ class MaxTagsBuilder: """ ~~->$ MaxLen:FormValidator~~ """ + @staticmethod def wtforms(field, settings): max = settings.get("max") - message = settings.get("message") if "message" in settings else 'You can only enter up to {x} keywords.'.format(x=max) + message = settings.get("message") if "message" in settings else 'You can only enter up to {x} keywords.'.format( + x=max) return MaxLen(max, message=message) @@ -2745,6 +2852,7 @@ class StopWordsBuilder: """ ~~->$ StopWords:FormValidator~~ """ + @staticmethod def wtforms(field, settings): stopwords = settings.get("disallowed", []) @@ -2755,6 +2863,7 @@ class ISSNInPublicDOAJBuilder: """ ~~->$ ISSNInPublicDOAJ:FormValidator~~ """ + @staticmethod def render(settings, html_attrs): # FIXME: not yet implemented in the front end, so setting here is speculative @@ -2878,7 +2987,19 @@ def render(settings, html_attrs): @staticmethod def wtforms(fields, settings): - return OnlyIf(settings.get('fields') or fields, settings.get('message')) + return OnlyIf(other_fields=settings.get('fields') or fields, ignore_empty=settings.get('ignore_empty', True), message=settings.get('message')) + + +class OnlyIfExistsBuilder: + # ~~->$ OnlyIf:FormValidator~~ + @staticmethod + def render(settings, html_attrs): + html_attrs["data-parsley-only-if-exists"] = ",".join([f["field"] for f in settings.get("fields", [])]) + html_attrs["data-parsley-only-if-exists-message"] = "
<br><br>" + settings.get("message") + "<br><br>
" + + @staticmethod + def wtforms(fields, settings): + return OnlyIfExists(other_fields=settings.get('fields') or fields, ignore_empty=settings.get('ignore_empty', True), message=settings.get('message')) class NotIfBuildier: @@ -2928,6 +3049,7 @@ def render(settings, html_attrs): def wtforms(field, settings): return BigEndDate(settings.get("message")) + class YearBuilder: @staticmethod def render(settings, html_attrs): @@ -2958,6 +3080,7 @@ def render(settings, html_attrs): def wtforms(field, settings): return CurrentISOLanguage(settings.get("message")) + ######################################################### # Crosswalks ######################################################### @@ -2967,16 +3090,16 @@ def wtforms(field, settings): "iso_country_list": iso_country_list, "iso_language_list": iso_language_list, "iso_currency_list": iso_currency_list, - "quick_reject" : quick_reject, - "application_statuses" : application_statuses, - "editor_choices" : editor_choices + "quick_reject": quick_reject, + "application_statuses": application_statuses, + "editor_choices": editor_choices }, - "disabled" : { - "application_status_disabled" : application_status_disabled, + "disabled": { + "application_status_disabled": application_status_disabled, "disable_edit_note_except_editing_user": disable_edit_note_except_editing_user, }, - "merge_disabled" : { - "merge_disabled_notes" : merge_disabled_notes + "merge_disabled": { + "merge_disabled_notes": merge_disabled_notes }, "validate": { "render": { @@ -2984,16 +3107,17 @@ def wtforms(field, settings): "is_url": IsURLBuilder.render, "int_range": IntRangeBuilder.render, "issn_in_public_doaj": ISSNInPublicDOAJBuilder.render, - "journal_url_in_public_doaj" : JournalURLInPublicDOAJBuilder.render, + "journal_url_in_public_doaj": JournalURLInPublicDOAJBuilder.render, "optional_if": OptionalIfBuilder.render, "is_issn": IsISSNBuilder.render, "is_issn_list": IsISSNListBuilder.render, "different_to": DifferentToBuilder.render, "required_if": RequiredIfBuilder.render, - "only_if" : OnlyIfBuilder.render, - "group_member" : GroupMemberBuilder.render, - "not_if" : NotIfBuildier.render, - "required_value" : RequiredValueBuilder.render, + "only_if": OnlyIfBuilder.render, + "only_if_exists": OnlyIfExistsBuilder.render, + "group_member": GroupMemberBuilder.render, + "not_if": NotIfBuildier.render, + "required_value": RequiredValueBuilder.render, "bigenddate": BigEndDateBuilder.render, "no_script_tag": NoScriptTagBuilder.render, "year": YearBuilder.render @@ -3005,19 +3129,20 @@ def wtforms(field, settings): "int_range": IntRangeBuilder.wtforms, "stop_words": StopWordsBuilder.wtforms, "issn_in_public_doaj": ISSNInPublicDOAJBuilder.wtforms, - "journal_url_in_public_doaj" : JournalURLInPublicDOAJBuilder.wtforms, + "journal_url_in_public_doaj": JournalURLInPublicDOAJBuilder.wtforms, "optional_if": OptionalIfBuilder.wtforms, "is_issn": IsISSNBuilder.wtforms, "is_issn_list": IsISSNListBuilder.wtforms, "different_to": DifferentToBuilder.wtforms, "required_if": RequiredIfBuilder.wtforms, - "only_if" : OnlyIfBuilder.wtforms, - "group_member" : GroupMemberBuilder.wtforms, - "not_if" : NotIfBuildier.wtforms, - "required_value" : RequiredValueBuilder.wtforms, + "only_if": OnlyIfBuilder.wtforms, + "only_if_exists": OnlyIfExistsBuilder.wtforms, + "group_member": GroupMemberBuilder.wtforms, + "not_if": NotIfBuildier.wtforms, + "required_value": RequiredValueBuilder.wtforms, "bigenddate": BigEndDateBuilder.wtforms, - "reserved_usernames" : ReservedUsernamesBuilder.wtforms, - 
"owner_exists" : OwnerExistsBuilder.wtforms, + "reserved_usernames": ReservedUsernamesBuilder.wtforms, + "owner_exists": OwnerExistsBuilder.wtforms, "no_script_tag": NoScriptTagBuilder.wtforms, "year": YearBuilder.wtforms, "current_iso_currency": CurrentISOCurrencyBuilder.wtforms, @@ -3027,22 +3152,22 @@ def wtforms(field, settings): } JAVASCRIPT_FUNCTIONS = { - "clickable_url": "formulaic.widgets.newClickableUrl", # ~~-> ClickableURL:FormWidget~~ - "click_to_copy": "formulaic.widgets.newClickToCopy", # ~~-> ClickToCopy:FormWidget~~ - "clickable_owner": "formulaic.widgets.newClickableOwner", # ~~-> ClickableOwner:FormWidget~~ - "select": "formulaic.widgets.newSelect", # ~~-> SelectBox:FormWidget~~ + "clickable_url": "formulaic.widgets.newClickableUrl", # ~~-> ClickableURL:FormWidget~~ + "click_to_copy": "formulaic.widgets.newClickToCopy", # ~~-> ClickToCopy:FormWidget~~ + "clickable_owner": "formulaic.widgets.newClickableOwner", # ~~-> ClickableOwner:FormWidget~~ + "select": "formulaic.widgets.newSelect", # ~~-> SelectBox:FormWidget~~ "taglist": "formulaic.widgets.newTagList", # ~~-> TagList:FormWidget~~ - "tagentry" : "formulaic.widgets.newTagEntry", # ~~-> TagEntry:FormWidget~~ - "multiple_field": "formulaic.widgets.newMultipleField", # ~~-> MultiField:FormWidget~~ - "infinite_repeat": "formulaic.widgets.newInfiniteRepeat", # ~~-> InfiniteRepeat:FormWidget~~ - "autocomplete": "formulaic.widgets.newAutocomplete", # ~~-> Autocomplete:FormWidget~~ - "subject_tree" : "formulaic.widgets.newSubjectTree", # ~~-> SubjectTree:FormWidget~~ - "full_contents" : "formulaic.widgets.newFullContents", # ~~^->FullContents:FormWidget~~ - "load_editors" : "formulaic.widgets.newLoadEditors", # ~~-> LoadEditors:FormWidget~~ - "trim_whitespace" : "formulaic.widgets.newTrimWhitespace", # ~~-> TrimWhitespace:FormWidget~~ - "note_modal" : "formulaic.widgets.newNoteModal", # ~~-> NoteModal:FormWidget~~ + "tagentry": "formulaic.widgets.newTagEntry", # ~~-> TagEntry:FormWidget~~ + "multiple_field": "formulaic.widgets.newMultipleField", # ~~-> MultiField:FormWidget~~ + "infinite_repeat": "formulaic.widgets.newInfiniteRepeat", # ~~-> InfiniteRepeat:FormWidget~~ + "autocomplete": "formulaic.widgets.newAutocomplete", # ~~-> Autocomplete:FormWidget~~ + "subject_tree": "formulaic.widgets.newSubjectTree", # ~~-> SubjectTree:FormWidget~~ + "full_contents": "formulaic.widgets.newFullContents", # ~~^->FullContents:FormWidget~~ + "load_editors": "formulaic.widgets.newLoadEditors", # ~~-> LoadEditors:FormWidget~~ + "trim_whitespace": "formulaic.widgets.newTrimWhitespace", # ~~-> TrimWhitespace:FormWidget~~ + "note_modal": "formulaic.widgets.newNoteModal", # ~~-> NoteModal:FormWidget~~, "autocheck": "formulaic.widgets.newAutocheck", # ~~-> Autocheck:FormWidget~~ - "issn_link" : "formulaic.widgets.newIssnLink" # ~~-> IssnLink:FormWidget~~, + "issn_link": "formulaic.widgets.newIssnLink" # ~~-> IssnLink:FormWidget~~, } @@ -3087,6 +3212,7 @@ def __call__(self, field, **kwargs): html.append('' % self.html_tag) return HTMLString(''.join(html)) + ########################################################## # Mapping from configurations to WTForms builders ########################################################## @@ -3234,7 +3360,6 @@ def wtform(formulaic_context, field, wtfargs): return HiddenField(**wtfargs) - WTFORMS_BUILDERS = [ RadioBuilder, MultiCheckboxBuilder, @@ -3250,10 +3375,10 @@ def wtform(formulaic_context, field, wtfargs): HiddenFieldBuilder ] - -ApplicationFormFactory = Formulaic(APPLICATION_FORMS, WTFORMS_BUILDERS, 
function_map=PYTHON_FUNCTIONS, javascript_functions=JAVASCRIPT_FUNCTIONS) -JournalFormFactory = Formulaic(JOURNAL_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, javascript_functions=JAVASCRIPT_FUNCTIONS) - +ApplicationFormFactory = Formulaic(APPLICATION_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, + javascript_functions=JAVASCRIPT_FUNCTIONS) +JournalFormFactory = Formulaic(JOURNAL_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, + javascript_functions=JAVASCRIPT_FUNCTIONS) if __name__ == "__main__": """ diff --git a/portality/forms/application_processors.py b/portality/forms/application_processors.py index cd7fdaf5f4..a7f0271448 100644 --- a/portality/forms/application_processors.py +++ b/portality/forms/application_processors.py @@ -756,12 +756,13 @@ def finalise(self, save_target=True, email_alert=True): # email the publisher to tell them we received their update request if email_alert: - try: - # ~~-> Email:Notifications~~ - self._send_received_email() - except app_email.EmailException as e: - self.add_alert("We were unable to send you an email confirmation - possible problem with your email address") - app.logger.exception('Error sending reapplication received email to publisher') + DOAJ.eventsService().trigger(models.Event( + constants.EVENT_APPLICATION_UR_SUBMITTED, + current_user and current_user.id, + context={ + 'application': self.target.data, + } + )) def _carry_subjects_and_seal(self): # carry over the subjects @@ -771,33 +772,6 @@ def _carry_subjects_and_seal(self): # carry over the seal self.target.set_seal(self.source.has_seal()) - def _send_received_email(self): - # ~~-> Account:Model~~ - acc = models.Account.pull(self.target.owner) - if acc is None: - self.add_alert("Unable to locate account for specified owner") - return - - # ~~-> Email:Library~~ - to = [acc.email] - fro = app.config.get('SYSTEM_EMAIL_FROM', 'helpdesk@doaj.org') - subject = app.config.get("SERVICE_NAME","") + " - update request received" - - try: - if app.config.get("ENABLE_PUBLISHER_EMAIL", False): - app_email.send_mail(to=to, - fro=fro, - subject=subject, - template_name="email/publisher_update_request_received.jinja2", - application=self.target, - owner=acc) - self.add_alert('A confirmation email has been sent to ' + acc.email + '.') - except app_email.EmailException as e: - magic = str(uuid.uuid1()) - self.add_alert('Hm, sending the "update request received" email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . 
Thank you!') - app.logger.error(magic + "\n" + repr(e)) - raise e - class PublisherUpdateRequestReadOnly(ApplicationProcessor): """ diff --git a/portality/forms/article_forms.py b/portality/forms/article_forms.py index e15f44f32e..92bfc44968 100644 --- a/portality/forms/article_forms.py +++ b/portality/forms/article_forms.py @@ -1,5 +1,5 @@ from copy import deepcopy -from datetime import datetime +from typing import Literal, Optional from flask import render_template, url_for, request from flask_login import current_user @@ -70,7 +70,7 @@ def form(self, val): self._form = val @property - def source(self): + def source(self) -> Optional: return self._source @property @@ -525,15 +525,28 @@ def _render_checkbox(self, field, **kwargs): INITIAL_AUTHOR_FIELDS = 3 -def choices_for_article_issns(user, article_id=None): +def choices_for_article_issns(user, article_id=None, + issn_type: Literal['eissn', 'pissn', 'all'] = 'all'): + + owner = None if "admin" in user.role and article_id is not None: # ~~->Article:Model~~ a = models.Article.pull(article_id) - # ~~->Journal:Model~~ - issns = models.Journal.issns_by_owner(a.get_owner(), in_doaj=True) + if a: + owner = a.get_owner() + + if not owner: + owner = user.id + + if issn_type == 'eissn': + issn_field = 'bibjson.eissn.exact' + elif issn_type == 'pissn': + issn_field = 'bibjson.pissn.exact' else: - issns = models.Journal.issns_by_owner(user.id, in_doaj=True) + issn_field = 'index.issn.exact' + # ~~->Journal:Model~~ + issns = models.Journal.issns_by_owner(owner, in_doaj=True, issn_field=issn_field) ic = [("", "Select an ISSN")] + [(i, i) for i in issns] return ic @@ -556,8 +569,14 @@ class ArticleForm(Form): fulltext = StringField("Full-text URL", [OptionalIf("doi", "You must provide the Full-Text URL or the DOI"), validators.URL()]) publication_year = DOAJSelectField("Year", [validators.Optional()], choices=YEAR_CHOICES, default=str(dates.now().year)) publication_month = DOAJSelectField("Month", [validators.Optional()], choices=MONTH_CHOICES, default="" ) - pissn = DOAJSelectField("Print", [ThisOrThat("eissn", "Either this field or Online ISSN is required"), DifferentTo("eissn", message=IDENTICAL_ISSNS_ERROR)], choices=[]) # choices set at construction - eissn = DOAJSelectField("Online", [ThisOrThat("pissn", "Either this field or Print ISSN is required"), DifferentTo("pissn", message=IDENTICAL_ISSNS_ERROR)], choices=[]) # choices set at construction + pissn = DOAJSelectField("Print", [ + ThisOrThat("eissn", "Either this field or Online ISSN is required"), + DifferentTo("eissn", message=IDENTICAL_ISSNS_ERROR) + ], choices=[]) # choices set at construction + eissn = DOAJSelectField("Online", [ + ThisOrThat("pissn", "Either this field or Print ISSN is required"), + DifferentTo("pissn", message=IDENTICAL_ISSNS_ERROR) + ], choices=[]) # choices set at construction volume = StringField("Volume", [validators.Optional(), NoScriptTag()]) number = StringField("Issue", [validators.Optional(), NoScriptTag()]) @@ -566,13 +585,17 @@ class ArticleForm(Form): def __init__(self, *args, **kwargs): super(ArticleForm, self).__init__(*args, **kwargs) + self.set_choices() + + def set_choices(self, user=None, article_id=None): + user = user or current_user try: - self.pissn.choices = choices_for_article_issns(current_user) - self.eissn.choices = choices_for_article_issns(current_user) - except: + self.pissn.choices = choices_for_article_issns(user, issn_type='pissn', article_id=article_id) + self.eissn.choices = choices_for_article_issns(user, issn_type='eissn', 
article_id=article_id) + except Exception as e: # not logged in, and current_user is broken # probably you are loading the class from the command line - pass + app.logger.exception(str(e)) @@ -605,15 +628,8 @@ def __init__(self, source, form_data, user): super(MetadataForm, self).__init__(source=source, form_data=form_data) def _set_choices(self): - try: - ic = choices_for_article_issns(user=self.user, article_id=self.source.id) - self.form.pissn.choices = ic - self.form.eissn.choices = ic - except Exception as e: - print (str(e)) - # not logged in, and current_user is broken - # probably you are loading the class from the command line - pass + if self.source is not None: + self.form.set_choices(user=self.user, article_id=self.source.id) def modify_authors_if_required(self, request_data): diff --git a/portality/forms/validate.py b/portality/forms/validate.py index c7dd02cda2..363a9cba76 100644 --- a/portality/forms/validate.py +++ b/portality/forms/validate.py @@ -472,6 +472,20 @@ def __call__(self, form, field): validators.ValidationError(self.message) +class OnlyIfExists(OnlyIf): + """ + Field only validates if other fields DOES have ANY values (or are truthy) + ~~NotIf:FormValidator~~ + """ + + def __call__(self, form, field): + others = self.get_other_fields(form) + + for o_f in self.other_fields: + other = others[o_f["field"]] + if not other.data or not field.data: + validators.ValidationError(self.message) + class NoScriptTag(object): """ Checks that a field does not contain a script html tag diff --git a/portality/lib/color_text.py b/portality/lib/color_text.py new file mode 100644 index 0000000000..1e2511c471 --- /dev/null +++ b/portality/lib/color_text.py @@ -0,0 +1,55 @@ +from enum import Enum +from typing import Union + +reset_code = "\u001b[0m" + + +class Color(Enum): + black = 0 + red = 1 + green = 2 + yellow = 3 + blue = 4 + magenta = 5 + cyan = 6 + white = 7 + + +ColorLike = Union[Color, int] + + +def apply_color(text, + front: ColorLike = None, + background: ColorLike = None, + bold=False, faint=False, underline=False, + blink=False) -> str: + def _to_color_idx(_input: ColorLike): + if isinstance(_input, Color): + return _input.value + return _input + + color_code_list = [] + if front is not None: + color_code_list.append(30 + _to_color_idx(front)) + + if background is not None: + color_code_list.append(40 + _to_color_idx(background)) + + if bold: + color_code_list.append(1) + + if faint: + color_code_list.append(2) + + if underline: + color_code_list.append(4) + + if blink: + color_code_list.append(5) + + if not color_code_list: + return f'{text}' + + color_start_code = ';'.join(map(str, color_code_list)) + color_start_code = f'\u001b[{color_start_code}m' + return f'{color_start_code}{text}{reset_code}' diff --git a/portality/lib/paths.py b/portality/lib/paths.py index 2d65093fe8..18d5f689c9 100644 --- a/portality/lib/paths.py +++ b/portality/lib/paths.py @@ -36,7 +36,7 @@ def get_project_root() -> Path: return Path(__file__).parent.parent.parent.absolute() -def create_tmp_dir(is_auto_mkdir=False) -> Path: +def create_tmp_path(is_auto_mkdir=False) -> Path: num_retry = 20 for _ in range(num_retry): path = Path(tempfile.NamedTemporaryFile().name) diff --git a/portality/migrate/3829_remove_seal_column/README.md b/portality/migrate/3829_remove_seal_column/README.md new file mode 100644 index 0000000000..13b52aa38f --- /dev/null +++ b/portality/migrate/3829_remove_seal_column/README.md @@ -0,0 +1,7 @@ +# 2024-03-21; Issue 3829 - Remove seal column + +## Execution + +Run the 
migration with + + python portality/upgrade.py -u portality/migrate/3829_remove_seal_column/migrate.json \ No newline at end of file diff --git a/portality/migrate/3829_remove_seal_column/__init__.py b/portality/migrate/3829_remove_seal_column/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/portality/migrate/3829_remove_seal_column/migrate.json b/portality/migrate/3829_remove_seal_column/migrate.json new file mode 100644 index 0000000000..57bf924be0 --- /dev/null +++ b/portality/migrate/3829_remove_seal_column/migrate.json @@ -0,0 +1,14 @@ +{ + "batch": 10000, + "types": [ + { + "type": "datalog_journal_added", + "init_with_model": false, + "action": "index", + "keepalive": "20m", + "functions" : [ + "portality.migrate.3829_remove_seal_column.operations.remove_has_seal" + ] + } + ] +} \ No newline at end of file diff --git a/portality/migrate/3829_remove_seal_column/operations.py b/portality/migrate/3829_remove_seal_column/operations.py new file mode 100644 index 0000000000..78f1e8685a --- /dev/null +++ b/portality/migrate/3829_remove_seal_column/operations.py @@ -0,0 +1,5 @@ +def remove_has_seal(obj): + if 'has_seal' in obj: + print(f'update record {obj}') + del obj['has_seal'] + return obj diff --git a/portality/models/__init__.py b/portality/models/__init__.py index eea1859a70..2570929105 100644 --- a/portality/models/__init__.py +++ b/portality/models/__init__.py @@ -11,7 +11,7 @@ from portality.models.lcc import LCC from portality.models.account import Account from portality.models.editors import EditorGroup, EditorGroupMemberQuery, EditorGroupQuery -from portality.models.uploads import FileUpload, ExistsFileQuery, OwnerFileQuery, ValidFileQuery +from portality.models.uploads import FileUpload, ExistsFileQuery, OwnerFileQuery, ValidFileQuery, BulkArticles from portality.models.lock import Lock from portality.models.history import ArticleHistory, JournalHistory from portality.models.article import Article, ArticleBibJSON, ArticleQuery, ArticleVolumesQuery, DuplicateArticleQuery, NoJournalException diff --git a/portality/models/account.py b/portality/models/account.py index 2bd70a3161..f1a62f41f0 100644 --- a/portality/models/account.py +++ b/portality/models/account.py @@ -56,7 +56,7 @@ def pull_by_email(cls, email: str): res = cls.query(q='email:"' + email + '"') if res.get('hits', {}).get('total', {}).get('value', 0) == 1: acc = cls(**res['hits']['hits'][0]['_source']) - if acc.email == email: # Only return the account if it was an exact match with supplied email + if acc.email.lower() == email.lower(): # allow case insensitive login return acc return None @@ -246,3 +246,9 @@ def get_name_safe(cls, account_id) -> str: if author is not None and author.name: return author.name return '' + + @classmethod + def is_enable_publisher_email(cls) -> bool: + # TODO: in the long run this needs to move out to the user's email preferences but for now it + # is here to replicate the behaviour in the code it replaces + return app.config.get("ENABLE_PUBLISHER_EMAIL", False) diff --git a/portality/models/background.py b/portality/models/background.py index 604eccc95d..5fd6f8b7cc 100644 --- a/portality/models/background.py +++ b/portality/models/background.py @@ -250,8 +250,18 @@ def __init__(self): }, } - def append_must(self, must_dict: dict): - self.query_dict["query"]["bool"]["must"].append(must_dict) + def _append_bool_condition(self, bool_key: str, condition: dict): + if bool_key not in self.query_dict['query']['bool']: + self.query_dict['query']['bool'][bool_key] = [] 
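+        # with the clause list guaranteed to exist, append the condition; e.g. (illustrative)
+        # append_must_not({"term": {"queue_id.exact": qid}}) lands under query.bool.must_not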
+ self.query_dict['query']['bool'][bool_key].append(condition) + return self + + def append_must(self, condition: dict): + self._append_bool_condition('must', condition) + return self + + def append_must_not(self, condition: dict): + self._append_bool_condition('must_not', condition) return self def since(self, since: datetime.datetime): @@ -268,13 +278,19 @@ def queue_id(self, queue_id): self.append_must({"term": {"queue_id.exact": queue_id}}) return self + def _to_list(self, item): + if isinstance(item, str): + item = [item] + elif not isinstance(item, list): + item = list(item) + return item + def status_includes(self, status): - if isinstance(status, str): - status = [status] - elif not isinstance(status, list): - status = list(status) + self.append_must({"terms": {"status.exact": self._to_list(status)}}) + return self - self.append_must({"terms": {"status.exact": status}}) + def status_excludes(self, status): + self.append_must_not({"terms": {"status.exact": self._to_list(status)}}) return self def size(self, size: int): @@ -325,5 +341,3 @@ def query(self): .order_by('last_updated', 'desc') .size(1) .build_query_dict()) - - diff --git a/portality/models/datalog_journal_added.py b/portality/models/datalog_journal_added.py index dfea539840..51bb22d424 100644 --- a/portality/models/datalog_journal_added.py +++ b/portality/models/datalog_journal_added.py @@ -14,7 +14,6 @@ class DatalogJournalAdded(SeamlessMixin, DomainObject): "title": {"coerce": "unicode"}, "issn": {"coerce": "unicode"}, "date_added": {"coerce": "utcdatetime-datalog"}, - "has_seal": {"coerce": "bool"}, "has_continuations": {"coerce": "bool"}, "journal_id": {"coerce": "unicode"}, "created_date": {"coerce": "utcdatetime"}, @@ -63,14 +62,6 @@ def date_added(self, val): def date_added_str(self): return self.date_added.strftime(self.DATE_FMT) - @property - def has_seal(self): - return self.__seamless__.get_single("has_seal") - - @has_seal.setter - def has_seal(self, val): - self.__seamless__.set_single('has_seal', val) - @property def has_continuations(self): return self.__seamless__.get_single("has_continuations") diff --git a/portality/models/event.py b/portality/models/event.py index dd875e490b..ca6a0b335f 100644 --- a/portality/models/event.py +++ b/portality/models/event.py @@ -3,12 +3,12 @@ class Event(object): - def __init__(self, id=None, who=None, context=None, raw=None): + def __init__(self, id=None, who: str = None, context: dict = None, raw=None): if raw is not None: self.data = raw else: self.data = { - "when" : dates.now_str() + "when": dates.now_str() } if id is not None: self.id = id diff --git a/portality/models/uploads.py b/portality/models/uploads.py index ca8b734550..c955a6cbd3 100644 --- a/portality/models/uploads.py +++ b/portality/models/uploads.py @@ -1,27 +1,21 @@ -from portality.dao import DomainObject, ESMappingMissingError from copy import deepcopy +from portality.dao import DomainObject, ESMappingMissingError from portality.lib import dates -class FileUpload(DomainObject): - __type__ = "upload" - - @property - def status(self): - return self.data.get("status") +class BaseArticlesUpload(DomainObject): + """ + Base class for article uploads. which handle status and error messages. - @property - def local_filename(self): - return self.id + ".xml" + This is abstract class. it has no __type__ attribute. + For object creation and query please use FileUpload or BulkArticles instead. 
- @property - def filename(self): - return self.data.get("filename") + """ @property - def schema(self): - return self.data.get("schema") + def status(self): + return self.data.get("status") @property def owner(self): @@ -61,23 +55,14 @@ def created_timestamp(self): return None return dates.parse(self.data["created_date"]) - def set_schema(self, s): - self.data["schema"] = s - - def upload(self, owner, filename, status="incoming"): - self.data["filename"] = filename - self.data["owner"] = owner - self.data["status"] = status - def failed(self, message, details=None): self.data["status"] = "failed" self.data["error"] = message if details is not None: self.data["error_details"] = details - def validated(self, schema): + def validated(self): self.data["status"] = "validated" - self.data["schema"] = schema def processed(self, count, update, new): self.data["status"] = "processed" @@ -104,9 +89,6 @@ def set_failure_reasons(self, shared, unowned, unmatched): def exists(self): self.data["status"] = "exists" - def downloaded(self): - self.data["status"] = "downloaded" - @classmethod def list_valid(cls): q = ValidFileQuery() @@ -124,13 +106,57 @@ def by_owner(cls, owner, size=10): res = cls.query(q=q.query()) except ESMappingMissingError: return [] - rs = [FileUpload(**r.get("_source")) for r in res.get("hits", {}).get("hits", [])] + rs = [cls(**r.get("_source")) for r in res.get("hits", {}).get("hits", [])] return rs +class FileUpload(BaseArticlesUpload): + __type__ = "upload" + + @property + def schema(self): + return self.data.get("schema") + + def set_schema(self, s): + self.data["schema"] = s + + @property + def filename(self): + return self.data.get("filename") + + @property + def local_filename(self): + return self.id + ".xml" + + def downloaded(self): + self.data["status"] = "downloaded" + + def validated(self, schema=None): + self.data["status"] = "validated" + if schema is not None: + self.data["schema"] = schema + + def upload(self, owner, filename, status="incoming"): + self.data["filename"] = filename + self.data["owner"] = owner + self.data["status"] = status + + +class BulkArticles(BaseArticlesUpload): + __type__ = "bulk_articles" + + @property + def local_filename(self): + return self.id + ".json" + + def incoming(self, owner): + self.data["owner"] = owner + self.data["status"] = "incoming" + + class ValidFileQuery(object): base_query = { - "track_total_hits" : True, + "track_total_hits": True, "query": { "term": {"status.exact": "validated"} }, @@ -148,7 +174,7 @@ def query(self): class ExistsFileQuery(object): base_query = { - "track_total_hits" : True, + "track_total_hits": True, "query": { "term": {"status.exact": "exists"} }, @@ -166,7 +192,7 @@ def query(self): class OwnerFileQuery(object): base_query = { - "track_total_hits" : True, + "track_total_hits": True, "query": { "bool": { "must": [] diff --git a/portality/models/v2/application.py b/portality/models/v2/application.py index 391156ed4a..c8f42d5c4e 100644 --- a/portality/models/v2/application.py +++ b/portality/models/v2/application.py @@ -7,6 +7,9 @@ from portality.models.v2.journal import JournalLikeObject, Journal from portality.lib.coerce import COERCE_MAP from portality.dao import DomainObject +from portality.bll import DOAJ + + APPLICATION_STRUCT = { "objects": [ @@ -208,8 +211,17 @@ def prep(self, is_update=True): self.set_last_updated() def save(self, sync_owner=True, **kwargs): + if self.id is None: + self.set_id(self.makeid()) + + if self.application_type == constants.APPLICATION_TYPE_UPDATE_REQUEST: + # ~~-> 
Concurrency_Prevention:Service ~~ + cs = DOAJ.applicationService() + cs.prevent_concurrent_ur_submission(self, record_if_not_concurrent=True) + self.prep() self.verify_against_struct() + if sync_owner: self._sync_owner_to_journal() return super(Application, self).save(**kwargs) diff --git a/portality/models/v2/bibjson.py b/portality/models/v2/bibjson.py index 65fbec0b90..99e4292982 100644 --- a/portality/models/v2/bibjson.py +++ b/portality/models/v2/bibjson.py @@ -601,7 +601,7 @@ def waiver_url(self, url): ##################################################### ## External utility functions - def issns(self): + def issns(self) -> list: issns = [] if self.pissn: issns.append(self.pissn) @@ -609,6 +609,9 @@ def issns(self): issns.append(self.eissn) return issns + def issns_as_text(self) -> str: + return ", ".join(issn for issn in self.issns()) + def publisher_country_name(self): if self.publisher_country is not None: return datasets.get_country_name(self.publisher_country) diff --git a/portality/models/v2/journal.py b/portality/models/v2/journal.py index 735a88a42d..f97f8084f2 100644 --- a/portality/models/v2/journal.py +++ b/portality/models/v2/journal.py @@ -1,19 +1,23 @@ -from portality.dao import DomainObject -from portality.core import app -from portality.lib.dates import DEFAULT_TIMESTAMP_VAL -from portality.models.v2.bibjson import JournalLikeBibJSON -from portality.models.v2 import shared_structs -from portality.models.account import Account -from portality.lib import es_data_mapping, dates, coerce -from portality.lib.seamless import SeamlessMixin -from portality.lib.coerce import COERCE_MAP +from __future__ import annotations +import string +import uuid from copy import deepcopy from datetime import datetime, timedelta +from typing import Callable, Iterable -import string, uuid from unidecode import unidecode +from portality.core import app +from portality.dao import DomainObject +from portality.lib import es_data_mapping, dates, coerce +from portality.lib.coerce import COERCE_MAP +from portality.lib.dates import DEFAULT_TIMESTAMP_VAL +from portality.lib.seamless import SeamlessMixin +from portality.models.account import Account +from portality.models.v2 import shared_structs +from portality.models.v2.bibjson import JournalLikeBibJSON + JOURNAL_STRUCT = { "objects": [ "admin", "index" @@ -53,6 +57,7 @@ } + class ContinuationException(Exception): pass @@ -87,8 +92,8 @@ def find_by_issn_exact(cls, issns, in_doaj=None, max=2): return records @classmethod - def issns_by_owner(cls, owner, in_doaj=None): - q = IssnQuery(owner, in_doaj=in_doaj) + def issns_by_owner(cls, owner, in_doaj=None, issn_field=None): + q = IssnQuery(owner, in_doaj=in_doaj, issn_field=issn_field) res = cls.query(q=q.query()) issns = [term.get("key") for term in res.get("aggregations", {}).get("issns", {}).get("buckets", [])] return issns @@ -280,7 +285,6 @@ def add_note_by_dict(self, note): return self.add_note(note=note.get("note"), date=note.get("date"), id=note.get("id"), author_id=note.get("author_id")) - def remove_note(self, note): self.__seamless__.delete_from_list("admin.notes", matchsub=note) @@ -301,7 +305,7 @@ def ordered_notes(self): clusters = {} for note in notes: if "date" not in note: - note["date"] = DEFAULT_TIMESTAMP_VAL # this really means something is broken with note date setting, which needs to be fixed + note["date"] = DEFAULT_TIMESTAMP_VAL # this really means something is broken with note date setting, which needs to be fixed if note["date"] not in clusters: clusters[note["date"]] = [note] 
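+                # the first note for a given date starts a new cluster; the else branch
+                # below appends later notes sharing that date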
else: @@ -505,7 +509,7 @@ def __init__(self, **kwargs): if "_source" in kwargs: kwargs = kwargs["_source"] # FIXME: I have taken this out for the moment, as I'm not sure it's what we should be doing - #if kwargs: + # if kwargs: # self.add_autogenerated_fields(**kwargs) super(Journal, self).__init__(raw=kwargs) @@ -535,7 +539,7 @@ def add_autogenerated_fields(cls, **kwargs): bib["pid_scheme"] = {"has_pid_scheme": False} if "preservation" in bib and bib["preservation"] != '': bib["preservation"]["has_preservation"] = (len(bib["preservation"]) != 0 or - bib["national_library"] is not None) + bib["national_library"] is not None) else: bib["preservation"] = {"has_preservation": True} @@ -764,7 +768,8 @@ def remove_related_applications(self): self.__seamless__.delete("admin.related_applications") def remove_related_application(self, application_id): - self.set_related_applications([r for r in self.related_applications if r.get("application_id") != application_id]) + self.set_related_applications([r for r in self.related_applications + if r.get("application_id") != application_id]) def related_application_record(self, application_id): for record in self.related_applications: @@ -784,29 +789,41 @@ def latest_related_application_id(self): ######################################################################## ## Functions for handling continuations - def get_future_continuations(self): - irb = self.bibjson().is_replaced_by - q = ContinuationQuery(irb) - - journals = self.q2obj(q=q.query()) - subjournals = [] - for j in journals: - subjournals += j.get_future_continuations() - future = journals + subjournals - return future + def _get_continuations(self, issns, + get_sub_journals: Callable, + journal_caches: set[str] = None) -> Iterable['Journal']: + """ - def get_past_continuations(self): - replaces = self.bibjson().replaces - q = ContinuationQuery(replaces) + Parameters + ---------- + issns + get_sub_journals + journal_caches + contains the ids of journals that have already been processed; pass this + set between calls to avoid infinite recursion over continuation chains + """ + journal_caches = journal_caches or set() + journal_caches.add(self.id) + journals = self.q2obj(q=ContinuationQuery(issns).query()) + journals = [j for j in journals if j.id not in journal_caches] + journal_caches.update({j.id for j in journals}) - journals = self.q2obj(q=q.query()) subjournals = [] for j in journals: - subjournals += j.get_past_continuations() + subjournals += get_sub_journals(j, journal_caches) - past = journals + subjournals - return past + return journals + subjournals + + def get_future_continuations(self, journal_caches: set[str] = None) -> Iterable['Journal']: + return self._get_continuations(self.bibjson().is_replaced_by, + lambda j, jc: j.get_future_continuations(jc), + journal_caches=journal_caches) + + def get_past_continuations(self, journal_caches: set[str] = None) -> Iterable['Journal']: + return self._get_continuations(self.bibjson().replaces, + lambda j, jc: j.get_past_continuations(jc), + journal_caches=journal_caches) ####################################################################### @@ -853,7 +870,6 @@ def propagate_in_doaj_status_to_articles(self): article.set_in_doaj(self.is_in_doaj()) article.save() - def prep(self, is_update=True): self._ensure_in_doaj() self.calculate_tick() @@ -1010,7 +1026,7 @@ def query(self): ] } }, - "size" : self.max + "size": self.max } if self.in_doaj is not None: q["query"]["bool"]["must"].append({"term": {"admin.in_doaj": self.in_doaj}}) @@ -1018,14 +1034,15 @@ def 
query(self): class IssnQuery(object): - def __init__(self, owner, in_doaj=None): + def __init__(self, owner, in_doaj=None, issn_field=None): self._owner = owner self._in_doaj = in_doaj + self._issn_field = issn_field or 'index.issn.exact' def query(self): - musts = [{"term": { "admin.owner.exact": self._owner}}] + musts = [{"term": {"admin.owner.exact": self._owner}}] if self._in_doaj is not None: - musts.append({"term": { "admin.in_doaj": self._in_doaj}}) + musts.append({"term": {"admin.in_doaj": self._in_doaj}}) return { "track_total_hits": True, "query": { @@ -1037,9 +1054,9 @@ def query(self): "aggs": { "issns": { "terms": { - "field": "index.issn.exact", + "field": self._issn_field, "size": 10000, - "order": { "_key": "asc" } + "order": {"_key": "asc"} } } } @@ -1162,9 +1179,9 @@ def __init__(self, max): def query(self): return { "track_total_hits": True, - "query" : {"match_all" : {}}, - "size" : self.max, - "sort" : [ - {"created_date" : {"order" : "desc"}} + "query": {"match_all": {}}, + "size": self.max, + "sort": [ + {"created_date": {"order": "desc"}} ] } diff --git a/portality/regex.py b/portality/regex.py index 09a55a52bf..2e152ead9d 100644 --- a/portality/regex.py +++ b/portality/regex.py @@ -19,9 +19,9 @@ #~~URL:Regex~~ HTTP_URL = ( r'^(?:https?)://' # Scheme: http(s) or ftp - r'(?:[\w-]+\.)*[\w-]+' # Domain name (optional subdomains) + r'(?:[\w\-]+\.)*[\w\-]+' # Domain name (optional subdomains) r'(?:\.[a-z]{2,})' # Top-level domain (e.g., .com, .org) - r'(?:\:(0|6[0-5][0-5][0-3][0-5]|[1-5][0-9][0-9][0-9][0-9]|[1-9][0-9]{0,3}))?' # port (0-65535) preceded with `:` + r'(?::(0|6[0-5][0-5][0-3][0-5]|[1-5][0-9][0-9][0-9][0-9]|[1-9][0-9]{0,3}))?' # port (0-65535) preceded with `:` r'(?:\/[^\/\s]*)*' # Path (optional) r'(?:\?[^\/\s]*)?' # Query string (optional) r'(?:#[^\/\s]*)?$' # Fragment (optional) diff --git a/portality/scripts/220524_3886_duplicatied_emails/README.md b/portality/scripts/220524_3886_duplicatied_emails/README.md new file mode 100644 index 0000000000..d150d4392e --- /dev/null +++ b/portality/scripts/220524_3886_duplicatied_emails/README.md @@ -0,0 +1 @@ +Run query from `query.json` directly to find any accounts with duplicated emails (case-insensitive) \ No newline at end of file diff --git a/portality/scripts/220524_3886_duplicatied_emails/__init__.py b/portality/scripts/220524_3886_duplicatied_emails/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/portality/scripts/220524_3886_duplicatied_emails/query.json b/portality/scripts/220524_3886_duplicatied_emails/query.json new file mode 100644 index 0000000000..d0daf5f404 --- /dev/null +++ b/portality/scripts/220524_3886_duplicatied_emails/query.json @@ -0,0 +1,15 @@ +{ + "size": 0, + "aggs": { + "duplicate_emails": { + "terms": { + "script": { + "source": "doc['email.exact'].value.toLowerCase()", + "lang": "painless" + }, + "size": 10000, + "min_doc_count": 2 + } + } + } +} \ No newline at end of file diff --git a/portality/scripts/3918_received_app_by_country_and_year/__init__.py b/portality/scripts/3918_received_app_by_country_and_year/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/portality/scripts/3918_received_app_by_country_and_year/script.py b/portality/scripts/3918_received_app_by_country_and_year/script.py new file mode 100644 index 0000000000..29d43ccd2a --- /dev/null +++ b/portality/scripts/3918_received_app_by_country_and_year/script.py @@ -0,0 +1,84 @@ +from portality import models +from portality.bll import exceptions +import csv +from datetime 
import datetime + +QUERY = { + "track_total_hits": True, + "size": 0, + "query": { + "bool": { + "must": [ + { + "term": { + "admin.application_type.exact": "new_application" + } + } + ], + "filter": [ + { + "range": { + "created_date": { + "gte": "2019-01-01T00:00:00Z", + "lte": "2023-12-31T23:59:59Z" + } + } + } + ] + } + }, + "aggs": { + "applications_by_country": { + "aggs": { + "applications_by_year": { + "date_histogram": { + "field": "created_date", + "calendar_interval": "year", + "format": "yyyy", + "min_doc_count": 0 + } + } + }, + "terms": { + "field": "index.country.exact", + "size": 1000 + } + } + } +} + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("-o", "--out", help="output file", required=True) + args = parser.parse_args() + + # Initialize the CSV writer for the report + with open(args.out, "w", newline="", encoding="utf-8") as f: + writer = csv.writer(f) + writer.writerow(["Country", "Year", "Count"]) + + res = models.Application.send_query(QUERY) + country_buckets = res["aggregations"]["applications_by_country"]["buckets"] + + def get_country(country_bucket): + return country_bucket["key"] + + def get_years_data(country_bucket): + return country_bucket["applications_by_year"]["buckets"] + + def get_year(year_bucket): + return year_bucket["key_as_string"] + + def get_count(year_bucket): + return year_bucket["doc_count"] + + + for country_bucket in country_buckets: + years_buckets = get_years_data(country_bucket) + for years_bucket in years_buckets: + writer.writerow([get_country(country_bucket), get_year(years_bucket), get_count(years_bucket)]) + + print("Report generated successfully.") diff --git a/portality/scripts/download_swagger_ui.py b/portality/scripts/download_swagger_ui.py new file mode 100644 index 0000000000..9d09330650 --- /dev/null +++ b/portality/scripts/download_swagger_ui.py @@ -0,0 +1,48 @@ +import argparse + +import requests +from portality.lib.paths import get_project_root + + +def download_file(url, local_path, chunk_size=8192, show_progress=True): + cur_size = 0 + with requests.get(url, stream=True) as r: + with open(local_path, 'wb') as f: + for chunk in r.iter_content(chunk_size=chunk_size): + cur_size += len(chunk) / 1024 / 1024 + if show_progress: + print(f'download: {cur_size:.2f}MB', end='\r') + if chunk: + f.write(chunk) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('-v', '--version', default='5.11.6', help='swagger-ui version') + + args = parser.parse_args() + + version = args.version + dest_path = get_project_root() / 'portality/static/vendor' + if not dest_path.is_dir(): + raise ValueError(f'dest path not found: {dest_path}') + + dest_path = dest_path / f'swagger-ui-{version}' + dest_path.mkdir(parents=True, exist_ok=True) + + download_file(f"https://unpkg.com/swagger-ui-dist@{version}/swagger-ui.css", + dest_path / "swagger-ui.css") + download_file(f"https://unpkg.com/swagger-ui-dist@{version}/swagger-ui-bundle.js", + dest_path / "swagger-ui-bundle.js") + + try: + download_file(f"https://raw.githubusercontent.com/swagger-api/swagger-ui/v{version}/docs/usage/installation.md", + dest_path / "installation.md") + except Exception as e: + print(f'failed to download doc from github: {e}') + + print(f'downloaded to {dest_path.as_posix()}') + + +if __name__ == '__main__': + main() diff --git a/portality/scripts/githubpri/github_prioritisation.py b/portality/scripts/githubpri/github_prioritisation.py index 644a5f2650..88529c88c6 100644 --- 
a/portality/scripts/githubpri/github_prioritisation.py +++ b/portality/scripts/githubpri/github_prioritisation.py @@ -1,24 +1,28 @@ +""" +main script for generating the team priorities sheet +""" + import logging import os import sys +from collections import OrderedDict import pandas as pd from gspread.utils import ValueInputOption from portality.lib import gsheet -from portality.scripts.githubpri import pri_data_serv, gdrive_sheet_serv -from portality.scripts.githubpri.gdrive_sheet_serv import create_or_load_worksheet -from collections import OrderedDict +from portality.scripts.githubpri import pridata, pri_gsheets, github_utils +from portality.scripts.githubpri.pri_gsheets import create_or_load_worksheet log = logging.getLogger(__name__) def to_ordered_df_by_user_pri_map(user_pri_map): user_pri_map = user_pri_map.copy() - claimable_df = user_pri_map.pop(pri_data_serv.DEFAULT_USER, None) + claimable_df = user_pri_map.pop(pridata.DEFAULT_USER, None) user_pri_map = OrderedDict(sorted(user_pri_map.items(), key=lambda x: x[0].lower())) if claimable_df is not None: - user_pri_map[pri_data_serv.DEFAULT_USER] = claimable_df + user_pri_map[pridata.DEFAULT_USER] = claimable_df return pd.concat(user_pri_map, axis=1) @@ -28,8 +32,8 @@ def priorities(priorities_file, gdrive_filename=None, github_username=None, github_password_key=None, ): - sender = pri_data_serv.GithubReqSender(username=github_username, password_key=github_password_key) - user_pri_map = pri_data_serv.create_priorities_excel_data(priorities_file, sender) + sender = github_utils.GithubReqSender(token_password=github_password_key, username=github_username) + user_pri_map = pridata.create_priorities_excel_data(priorities_file, sender) if outfile is not None: to_ordered_df_by_user_pri_map(user_pri_map).to_csv(outfile) @@ -66,7 +70,7 @@ def priorities(priorities_file, cell.value = f'=HYPERLINK("{link}", "{title}")' worksheet.update_cells(cells, ValueInputOption.user_entered) - gdrive_sheet_serv.apply_prilist_styles(worksheet, display_df) + pri_gsheets.apply_prilist_styles(worksheet, display_df) print(f'[End] update google sheet [{gdrive_filename}]') @@ -80,13 +84,27 @@ def main(): description = """ Generate a excel for DOAJ github issue prioritisation queue for each user +Environment variables +--------------------- you need github and google drive api key to run this script: -* `DOAJ_PRILIST_KEY_PATH` is json file path for google drive api - the `DOAJ_PRILIST_KEY_PATH` json file generated by `console.cloud.google.com` and should be enabled for - * google drive api - * google sheet api -* `DOAJ_GITHUB_KEY` is github api key, this key is optional, if not provided, you can input github password instead by -p option - """ + +DOAJ_PRILIST_KEY_PATH + `DOAJ_PRILIST_KEY_PATH` is the path of the json key file for the google drive api, which is generated by + `console.cloud.google.com` and should be enabled for + * google drive api + * google sheet api + + +DOAJ_GITHUB_KEY + `DOAJ_GITHUB_KEY` is the github api key; it is optional, and if not provided you can input a github + password instead via the -p option + + +Example +--------------------- +github_prioritisation -g 'test-githubpri' -r '/tmp/githubpri-rule.csv' + +""" parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawTextHelpFormatter) parser.add_argument("-u", "--username", diff --git a/portality/scripts/githubpri/github_utils.py b/portality/scripts/githubpri/github_utils.py new file mode 100644 index 0000000000..3a0add3e66 --- /dev/null +++ 
b/portality/scripts/githubpri/github_utils.py @@ -0,0 +1,286 @@ +""" +functions to interact with "Github" for githubpri +""" +from __future__ import annotations + +import functools +import logging +import os +import warnings +from typing import Union, Iterable, TypedDict + +import requests +from requests import Response +from requests.auth import HTTPBasicAuth + +URL_API = "https://api.github.com" + +AuthLike = Union[dict, tuple, HTTPBasicAuth, None] + +log = logging.getLogger(__name__) + + +class GithubReqSender: + def __init__(self, token_password, username=None): + """ + + Parameters + ---------- + token_password + the password for the given username, or a github api key + username + """ + if token_password is None: + raise ValueError("token_password must be provided (github api key or password)") + self.username_password = (username, token_password) + + self.url_json_cache = {} + + def get(self, url, **req_kwargs) -> Response: + warnings.warn("use send instead of get", DeprecationWarning) + return send_request(url, auth=self.username_password, **req_kwargs) + + def send(self, url, method='get', **req_kwargs) -> Response: + return send_request(url, method=method, auth=self.username_password, **req_kwargs) + + def query_graphql(self, query: str) -> dict: + return self.send("https://api.github.com/graphql", method='post', json={'query': query}).json() + + @functools.lru_cache(maxsize=102400) + def send_cached_json(self, url): + if url in self.url_json_cache: + return self.url_json_cache[url] + + result = self.send(url).json() + self.url_json_cache[url] = result + return result + + def yield_all(self, url, params=None, n_per_page=100) -> Iterable[dict]: + return yields_all(url, auth=self.username_password, params=params, n_per_page=n_per_page) + + def __hash__(self): + return hash(self.username_password) + + def __eq__(self, other): + if not isinstance(other, GithubReqSender): + return False + return self.__hash__() == other.__hash__() + + +def send_request(url, method='get', + auth: AuthLike = None, + **req_kwargs) -> Response: + final_req_kwargs = {} + auth = create_auth(auth) + if auth is not None: + final_req_kwargs = {'auth': auth} + final_req_kwargs.update(req_kwargs) + resp = requests.request(method, url, **final_req_kwargs) + if resp.status_code >= 400: + raise ConnectionError(f'Something went wrong in api response: {resp.status_code} {resp.text}') + return resp + + +def create_auth(auth: AuthLike) -> HTTPBasicAuth | None: + """ + + Parameters + ---------- + auth + accepts HTTPBasicAuth, Tuple[username, password], Dict or None + + Returns + ------- + HTTPBasicAuth + + """ + + if auth is not None: + if isinstance(auth, tuple): + auth = HTTPBasicAuth(*auth) + if isinstance(auth, dict): + auth = HTTPBasicAuth(auth['username'], auth['password']) + return auth + + +def get_projects(full_name, auth: AuthLike) -> list[dict]: + """ + + Parameters + ---------- + full_name + owner/repo_name -- e.g. 
'DOAJ/doajPM' + auth + + Returns + ------- + + """ + url = f'{URL_API}/repos/{full_name}/projects' + resp = send_request(url, auth=auth) + project_list = resp.json() + return project_list + + +def get_project(full_name, project_name, auth: AuthLike) -> dict | None: + project_list = get_projects(full_name, auth) + names = [p for p in project_list if p.get("name") == project_name] + if len(names) == 0: + return None + if len(names) > 1: + log.warning(f"Multiple projects found: {project_name}") + return names[0] + + +def yields_all(url, auth: AuthLike, params=None, n_per_page=100) -> Iterable[dict]: + final_params = {"per_page": n_per_page, "page": 1} + if params is not None: + final_params.update(params) + + while True: + items = send_request(url, params=final_params, auth=auth).json() + yield from items + if len(items) < n_per_page: + break + + final_params["page"] += 1 + + +@functools.lru_cache(maxsize=102400) +def get_column_issues(columns_url, col, sender: GithubReqSender): + print(f"Fetching column issues {col}") + col_data = sender.send_cached_json(columns_url) + column_records = [c for c in col_data if c.get("name") == col] + if len(column_records) == 0: + log.warning(f"Column not found: {col}") + return [] + if len(column_records) > 1: + log.warning(f"Multiple columns found: {col}") + + issues = [] + for card_data in sender.yield_all(column_records[0].get("cards_url")): + issue_data = sender.send(card_data.get("content_url")).json() + issues.append(issue_data) + + print("Column issues {x}".format(x=[i.get("number") for i in issues])) + return issues + + +class Issue(TypedDict): + number: int + title: str + status: str + url: str + assignees: list[str] + label_names: list[str] + + +def find_all_issues(owner, repo, project_number, sender: GithubReqSender) -> Iterable[Issue]: + query_template = """ + { + repository(owner: "%s", name: "%s") { + projectV2(number: %s) { + url + title + items(first: 100, after: AFTER_CURSOR) { + pageInfo { + endCursor + hasNextPage + } + nodes { + content { + ... on Issue { + id + number + title + state + url + stateReason + labels (first:100) { + nodes { + name + } + } + assignees(first: 100) { + nodes { + name + login + } + } + } + } + fieldValues(first: 100) { + nodes { + ... on ProjectV2ItemFieldSingleSelectValue { + name + field { + ... 
on ProjectV2SingleSelectField { + name + } + } + } + } + } + } + } + } + } + """ % (owner, repo, project_number) + + # Function to fetch all items with pagination + def fetch_all_items(): + after_cursor = None + while True: + # Replace AFTER_CURSOR placeholder in the query template + query = query_template.replace("AFTER_CURSOR", f'"{after_cursor}"' if after_cursor else "null") + + data = sender.query_graphql(query) + + # Process the data + project = data['data']['repository']['projectV2'] + items = project['items']['nodes'] + yield from items + + # Check if there are more pages + page_info = project['items']['pageInfo'] + if page_info['hasNextPage']: + after_cursor = page_info['endCursor'] + else: + break + + def _to_issue(item): + content = item['content'] + return Issue( + number=content['number'], + title=content['title'], + url=content['url'], + assignees=[a['login'] for a in content['assignees']['nodes']], + status=next((f['name'] for f in item['fieldValues']['nodes'] + if f and f['field']['name'] == 'Status'), None), + label_names=[l['name'] for l in content['labels']['nodes']], + ) + + # Fetch all items + all_items = fetch_all_items() + all_items = (i for i in all_items if i['content']) + return map(_to_issue, all_items) + + +def main(): + # ad-hoc smoke test, using the same DOAJ/doajPM project number 8 as pridata + sender = GithubReqSender(os.environ.get('DOAJ_GITHUB_KEY')) + for i in find_all_issues('DOAJ', 'doajPM', 8, sender): + print(i) + + +if __name__ == '__main__': + main() diff --git a/portality/scripts/githubpri/pri_data_serv.py b/portality/scripts/githubpri/pri_data_serv.py deleted file mode 100644 index b0480aed90..0000000000 --- a/portality/scripts/githubpri/pri_data_serv.py +++ /dev/null @@ -1,199 +0,0 @@ -import csv -import json -import os -from collections import defaultdict -from typing import TypedDict, List, Dict - -import pandas as pd -import requests -from requests.auth import HTTPBasicAuth - -REPO = "https://api.github.com/repos/DOAJ/doajPM/" -PROJECTS = REPO + "projects" -PROJECT_NAME = "DOAJ Kanban" -DEFAULT_COLUMNS = ["Review", "In progress", "To Do"] -HEADERS = {"Accept": "application/vnd.github+json"} - -DEFAULT_USER = 'Claimable' - - -class GithubReqSender: - def __init__(self, username=None, password_key=None): - """ - :param password_key: - password of username or github api key - """ - self.username = username - self.password_key = password_key - if self.password_key is None: - raise ValueError("api_key or password must be provided") - - def create_github_request_kwargs(self) -> dict: - req_kwargs = {'headers': dict(HEADERS)} - req_kwargs['auth'] = HTTPBasicAuth(self.username, self.password_key) - return req_kwargs - - def get(self, url, **req_kwargs): - final_req_kwargs = self.create_github_request_kwargs() - final_req_kwargs.update(req_kwargs) - return requests.get(url, **final_req_kwargs) - - -class Rule(TypedDict): - id: str - labels: List[str] - columns: List[str] - - -class PriIssue(TypedDict): - rule_id: str - title: str - issue_url: str - - -class GithubIssue(TypedDict): - api_url: str - issue_number: str - status: str # e.g. 
'To Do', 'In progress', 'Review' - title: str - - -def load_rules(rules_file) -> List[Rule]: - if not os.path.exists(rules_file): - raise FileNotFoundError(f"Rules file [{rules_file}] not found") - with open(rules_file, "r") as f: - reader = csv.DictReader(f) - rules = [] - for row in reader: - rules.append({ - "id": row["id"], - "labels": [l.strip() for l in row["labels"].split(",") if l.strip() != ""], - "columns": [c.strip() for c in row["columns"].split(",") if c.strip() != ""] - }) - return rules - - -def create_priorities_excel_data(priorities_file, sender: GithubReqSender) -> Dict[str, pd.DataFrame]: - """ - - ENV VARIABLE `DOAJ_GITHUB_KEY` will be used if username and password are not provided - - :param priorities_file: - :param username: - :param password: - :return: - """ - resp = sender.get(PROJECTS) - if resp.status_code >= 400: - raise ConnectionError(f'Error fetching github projects: {resp.status_code} {resp.text}') - project_list = resp.json() - project = [p for p in project_list if p.get("name") == PROJECT_NAME][0] - user_priorities = defaultdict(list) - for priority in load_rules(priorities_file): - print("Applying rule {x}".format(x=json.dumps(priority))) - issues_by_user = _issues_by_user(project, priority, sender) - print("Unfiltered matches for rule {x}".format(x=issues_by_user)) - for user, issues in issues_by_user.items(): - issues: List[GithubIssue] - pri_issues = [PriIssue(rule_id=priority.get("id", 1), - title='[{}] {}'.format(github_issue['issue_number'], github_issue['title']), - issue_url=_ui_url(github_issue['api_url']), - status=github_issue['status'], - ) - for github_issue in issues] - pri_issues = [i for i in pri_issues if - i['issue_url'] not in {u['issue_url'] for u in user_priorities[user]}] - print("Novel issues for rule for user {x} {y}".format(x=user, y=pri_issues)) - user_priorities[user] += pri_issues - - df_list = {} - for user, pri_issues in user_priorities.items(): - df_list[user] = pd.DataFrame(pri_issues) - - return df_list - - -def _issues_by_user(project, priority, sender) -> Dict[str, List[GithubIssue]]: - cols = priority.get("columns", []) - if len(cols) == 0: - cols = DEFAULT_COLUMNS - - user_issues = defaultdict(list) - for status_col in cols: - column_issues = _get_column_issues(project, status_col, sender) - labels = priority.get("labels", []) - if len(labels) == 0: - _split_by_user(user_issues, column_issues, status_col) - continue - - labelled_issues = _filter_issues_by_label(column_issues, labels) - _split_by_user(user_issues, labelled_issues, status_col) - - return user_issues - - -COLUMN_CACHE = {} - - -def _get_column_issues(project, col, sender: GithubReqSender): - if col in COLUMN_CACHE: - return COLUMN_CACHE[col] - - print("Fetching column issues {x}".format(x=col)) - cols_url = project.get("columns_url") - resp = sender.get(cols_url) - col_data = resp.json() - - column_record = [c for c in col_data if c.get("name") == col][0] - cards_url = column_record.get("cards_url") - - params = {"per_page": 100, "page": 1} - issues = [] - - while True: - resp = sender.get(cards_url, params=params) - cards_data = resp.json() - if len(cards_data) == 0: - break - params["page"] += 1 - - for card_data in cards_data: - content_url = card_data.get("content_url") - resp = sender.get(content_url) - issue_data = resp.json() - issues.append(issue_data) - - COLUMN_CACHE[col] = issues - print("Column issues {x}".format(x=[i.get("url") for i in issues])) - return issues - - -def _filter_issues_by_label(issues, labels): - filtered = [] - for issue in 
issues: - issue_labels = issue.get("labels", []) - label_names = [l.get("name") for l in issue_labels] - found = 0 - for label in labels: - if label in label_names: - found += 1 - if found == len(labels): - filtered.append(issue) - return filtered - - -def _split_by_user(registry: defaultdict, issues: dict, status: str): - for issue in issues: - assignees = issue.get("assignees") - assignees = [a.get("login") for a in assignees] if assignees else [DEFAULT_USER] - github_issue = GithubIssue(api_url=issue.get("url"), - issue_number=issue.get("number"), - status=status, - title=issue.get("title"), - ) - for assignee in assignees: - registry[assignee].append(github_issue) - - -def _ui_url(api_url): - return "https://github.com/" + api_url[len("https://api.github.com/repos/"):] diff --git a/portality/scripts/githubpri/gdrive_sheet_serv.py b/portality/scripts/githubpri/pri_gsheets.py similarity index 97% rename from portality/scripts/githubpri/gdrive_sheet_serv.py rename to portality/scripts/githubpri/pri_gsheets.py index 49a9277c47..c8d584305d 100644 --- a/portality/scripts/githubpri/gdrive_sheet_serv.py +++ b/portality/scripts/githubpri/pri_gsheets.py @@ -1,3 +1,7 @@ +""" +functions to interact with "Google Drive Sheets" for githubpri +""" + import datetime import gspread @@ -26,7 +30,6 @@ def apply_prilist_styles(worksheet, display_df): latest_username = username gs_col_idx = col_idx + 1 - cells = worksheet.range(3, gs_col_idx, len(titles) + 3, gs_col_idx) gspfmt.format_cell_range(worksheet, range_idx_to_a1(1, gs_col_idx, n_row + 2, gs_col_idx), cell_format=gspfmt.CellFormat( diff --git a/portality/scripts/githubpri/pridata.py b/portality/scripts/githubpri/pridata.py new file mode 100644 index 0000000000..447c87a39d --- /dev/null +++ b/portality/scripts/githubpri/pridata.py @@ -0,0 +1,145 @@ +""" +functions and logic of priority data +core logic of githubpri +extract data from Github and convert to priority order format +""" + +from __future__ import annotations + +import csv +import json +import logging +import os +from collections import defaultdict +from typing import TypedDict, Iterable + +import pandas as pd + +from portality.scripts.githubpri import github_utils +from portality.scripts.githubpri.github_utils import GithubReqSender, Issue + +PROJECT_NAME = "DOAJ Kanban" +DEFAULT_COLUMNS = ["Review", "In progress", "To Do"] + +DEFAULT_USER = 'Claimable' + +log = logging.getLogger(__name__) + + +class Rule(TypedDict): + id: str + labels: list[str] + columns: list[str] + + +class PriIssue(TypedDict): + rule_id: str + title: str + issue_url: str + status: str + + +class GithubIssue(TypedDict): + url: str + issue_number: str + status: str # e.g. 
'To Do', 'In progress', 'Review' + title: str + assignees: list[str] + + +def load_rules(rules_file) -> list[Rule]: + if not os.path.exists(rules_file): + raise FileNotFoundError(f"Rules file [{rules_file}] not found") + with open(rules_file) as f: + reader = csv.DictReader(f) + rules = [] + for row in reader: + rules.append({ + "id": row["id"], + "labels": [l.strip() for l in row["labels"].split(",") if l.strip() != ""], + "columns": [c.strip() for c in row["columns"].split(",") if c.strip() != ""] + }) + return rules + + +def create_priorities_excel_data(priorities_file, sender: GithubReqSender) -> dict[str, pd.DataFrame]: + """ + ENV VARIABLE `DOAJ_GITHUB_KEY` will be used if username and password are not provided + + Parameters + ---------- + priorities_file + sender + + Returns + ------- + dict mapping 'username' to 'priority dataframe' + """ + github_owner = 'DOAJ' + github_repo = 'doajPM' + project_number = 8 + + print(f'Find issues from {github_owner}/{github_repo} project[{project_number}]') + print(f'Project url: http://github.com/orgs/{github_owner}/projects/{project_number}') + + all_issues = github_utils.find_all_issues(owner=github_owner, repo=github_repo, project_number=project_number, + sender=sender) + all_issues = list(all_issues) + print(f'Number of issues found: [{len(all_issues)}]') + + user_priorities = defaultdict(list) + for priority in load_rules(priorities_file): + print(f"Applying rule [{json.dumps(priority)}]") + issues_by_user = _issues_by_user(all_issues, priority) + print("Unfiltered matches for rule: [{}]".format( + sorted(i.get("issue_number") for user, issues in issues_by_user.items() for i in issues) + )) + + for user, issues in issues_by_user.items(): + issues: list[GithubIssue] + pri_issues = [PriIssue(rule_id=priority.get("id", 1), + title='[{}] {}'.format(github_issue['issue_number'], github_issue['title']), + issue_url=github_issue['url'], + status=github_issue['status'], + ) + for github_issue in issues] + pri_issues = [i for i in pri_issues if + i['issue_url'] not in {u['issue_url'] for u in user_priorities[user]}] + for i in pri_issues: + print(' * [{}]{}'.format(user, i.get('title'))) + user_priorities[user] += pri_issues + + df_list = {} + for user, pri_issues in user_priorities.items(): + df_list[user] = pd.DataFrame(pri_issues) + + return df_list + + +def _issues_by_user(issues: Iterable[Issue], priority) -> dict[str, list[GithubIssue]]: + cols = priority.get("columns", []) or DEFAULT_COLUMNS + + user_issues = defaultdict(list) + for status_col in cols: + status_issues = (issue for issue in issues if issue.get("status") == status_col) + labels = priority.get("labels", []) + if labels: + status_issues = (issue for issue in status_issues + if set(issue.get('label_names', [])).issuperset(set(labels))) + + status_issues = map(to_github_issue, status_issues) + for issue in status_issues: + for assignee in issue['assignees']: + user_issues[assignee].append(issue) + + return user_issues + + +def to_github_issue(issue: Issue): + github_issue = GithubIssue(url=issue.get("url"), + issue_number=issue.get("number"), + status=issue['status'], + title=issue.get("title"), + assignees=issue['assignees'] or [DEFAULT_USER] + ) + return github_issue diff --git a/portality/scripts/journal_urls.py b/portality/scripts/journal_urls.py index 3f1e8229aa..0d8b5c3514 100644 --- a/portality/scripts/journal_urls.py +++ b/portality/scripts/journal_urls.py @@ -51,7 +51,9 @@ def get_csv_file_name(): def extra_columns(j): """Add extra columns""" - return [('Journal ID', 
j.id)] + account = j.owner_account + return [('Journal ID', j.id), ('Account Name', account.name if account else ''), + ('Account Email', account.email if account else '')] def generate_journals_csv(csv_file): diff --git a/portality/scripts/journals_update_via_csv.py b/portality/scripts/journals_update_via_csv.py index f09ae5ea86..9d5f61e6fd 100644 --- a/portality/scripts/journals_update_via_csv.py +++ b/portality/scripts/journals_update_via_csv.py @@ -45,6 +45,15 @@ sys_acc = Account(**SYSTEM_ACCOUNT) + +def confirm_prompt(): + doit = input('Proceed? [y\\N] ') + + if doit.lower() != 'y': + print('\nExiting.') + exit(0) + + if __name__ == "__main__": import argparse @@ -67,11 +76,7 @@ print('\nNote supplied via $DOAJ_CSV_NOTE: ' + note) if not args.yes: - doit = input('Proceed? [y\\N] ') - - if doit.lower() != 'y': - print('\nExiting.') - exit(0) + confirm_prompt() # Disable app emails so this doesn't spam users app.config['ENABLE_EMAIL'] = False @@ -102,14 +107,18 @@ if not validation_results.has_errors() and args.force: print('Forcing update despite warnings...') elif validation_results.has_errors() and args.force: - print("Can't force update on file with errors.") - exit(1) + if args.sys: + print("DANGER - do you want to force these changes despite errors (some may not work)?") + confirm_prompt() + else: + print("Can't force update on file with errors using publisher account.") + exit(1) else: print(f'No updates processed due to errors or warnings. Supply -f arg to ignore warnings.') exit(1) - # if we get to here, the records can all be imported, so we can go ahead with minimal - # additional checks + # if we get to here, the records can all be imported*, so we can go ahead with minimal additional checks + # * unless we're forcing with errors # Open with encoding that deals with the Byte Order Mark since we're given files from Windows. 
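# ('utf-8-sig' transparently consumes a leading byte-order mark if one is present, and otherwise behaves like plain utf-8)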
with open(args.infile, 'r', encoding='utf-8-sig') as g: @@ -157,7 +166,7 @@ alock = None try: # ~~ ^->UpdateRequest:Feature ~~ - update_req, jlock, alock = DOAJ.applicationService().update_request_for_journal(j.id, account=j.owner_account) + update_req, jlock, alock = DOAJ.applicationService().update_request_for_journal(j.id, account=acc) except AuthoriseException as e: print('Could not create update request: {0}'.format(e.reason)) continue diff --git a/portality/scripts/link_checker_report.py b/portality/scripts/link_checker_report.py index 5bc1c922c5..ce1dec09af 100644 --- a/portality/scripts/link_checker_report.py +++ b/portality/scripts/link_checker_report.py @@ -103,8 +103,8 @@ def fetch_matching_rows(journal_url_index, report_values): # df_result_selected_columns = df_result[columns].copy() # create a copy to avoid SettingWithCopyWarning df_result_selected_columns = pd.DataFrame( data=[list(journal_data)], - columns=['Journal title', 'Added on Date', 'Last updated Date', "Journal ID", "Publisher", - "Country of publisher" ] + columns=['Journal title', 'Added on Date', 'Last updated Date', "Journal ID", 'Account Name', + 'Account Email', "Publisher", "Country of publisher" ] ) jid = df_result_selected_columns["Journal ID"].values[0] @@ -141,7 +141,7 @@ def _index_journals(df): # FIXME: assumes each URL only appears once if isinstance(cell, str) and cell.startswith("http"): # make an index of the URL to the journal title, added date, updated date and journal id - jidx[cell] = (row[0], row[50], row[51], row[54], row[9], row[10]) + jidx[cell] = (row[0], row[50], row[51], row[54], row[55], row[56], row[9], row[10]) return jidx @@ -192,8 +192,8 @@ def generate_report(csv_files, journal_csv_file): journal_url_index = _index_journals(journal_df) log("Indexed journal urls") - master_df = pd.DataFrame(columns=['Journal title', 'Added on Date', 'Last updated Date', "Journal ID", "Publisher", - "Country of publisher",]) + master_df = pd.DataFrame(columns=['Journal title', 'Added on Date', 'Last updated Date', "Journal ID", + 'Account Name', 'Account Email', "Publisher", "Country of publisher",]) for csv_file in csv_files: df = pd.read_csv(csv_file) log("Checking file {x}".format(x=csv_file)) diff --git a/portality/scripts/manage_background_jobs.py b/portality/scripts/manage_background_jobs.py index 20f1f53024..440594b2e9 100644 --- a/portality/scripts/manage_background_jobs.py +++ b/portality/scripts/manage_background_jobs.py @@ -1,131 +1,172 @@ """ Script to allow us to re-queue or cancel background jobs of certain kinds. -To use, do: - -python portality/scripts/requeue_background_job.py [-r] [-c] [-a ] [-s ] [-f ] [-t ] - -One of -r, -c is required. -r requeues jobs, -c cancels them. --a, -s, -f and -t are optional, with the following default values: - --a : None (for running through all job types supported below, others will be skipped) --s : queued --f : 1970-01-01T00:00:00Z --t : the current timestamp - This script supports managing the following background job types listed in HANDLERS; if you need to re-queue any other kind of job, you need to add it there. TODO: this should be a script calling functionality inside a business logic layer with a fuller understanding of jobs. 
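For illustration only, a minimal sketch (the task module and class names here are hypothetical) of how a
new job type would be registered, by adding it to the HANDLERS dict inside get_action_handler():

    from portality.tasks.my_task import MyNewBackgroundTask
    HANDLERS = {
        ...
        MyNewBackgroundTask.__action__: MyNewBackgroundTask,
    }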
""" -from portality import models -from portality.lib import dates +import argparse +import json +import time +import typing +from collections import Counter +from typing import Dict, Type, List + +from portality import constants +from portality.bll import DOAJ +from portality.constants import BGJOB_STATUS_QUEUED +from portality.lib import dates, color_text, es_queries from portality.lib.dates import DEFAULT_TIMESTAMP_VAL -from portality.tasks.anon_export import AnonExportBackgroundTask -from portality.tasks.article_bulk_delete import ArticleBulkDeleteBackgroundTask -from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask -from portality.tasks.article_duplicate_report import ArticleDuplicateReportBackgroundTask -from portality.tasks.async_workflow_notifications import AsyncWorkflowBackgroundTask -from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask -from portality.tasks.request_es_backup import RequestESBackupBackgroundTask -# from portality.tasks.find_discontinued_soon import FindDiscontinuedSoonBackgroundTask -from portality.tasks.harvester import HarvesterBackgroundTask -from portality.tasks.ingestarticles import IngestArticlesBackgroundTask -from portality.tasks.journal_bulk_delete import JournalBulkDeleteBackgroundTask -from portality.tasks.journal_bulk_edit import JournalBulkEditBackgroundTask -from portality.tasks.journal_csv import JournalCSVBackgroundTask -from portality.tasks.journal_in_out_doaj import SetInDOAJBackgroundTask -from portality.tasks.preservation import PreservationBackgroundTask -from portality.tasks.prune_es_backups import PruneESBackupsBackgroundTask -from portality.tasks.public_data_dump import PublicDataDumpBackgroundTask -from portality.tasks.read_news import ReadNewsBackgroundTask -from portality.tasks.reporting import ReportingBackgroundTask -from portality.tasks.sitemap import SitemapBackgroundTask -from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask - -from portality.background import BackgroundApi - -# dict of {task_name: task_class} so we can interact with the jobs -HANDLERS = { - AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, - ArticleBulkDeleteBackgroundTask.__action__: ArticleBulkDeleteBackgroundTask, - ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, - ArticleDuplicateReportBackgroundTask.__action__: ArticleDuplicateReportBackgroundTask, - AsyncWorkflowBackgroundTask.__action__: AsyncWorkflowBackgroundTask, - CheckLatestESBackupBackgroundTask.__action__: CheckLatestESBackupBackgroundTask, - RequestESBackupBackgroundTask.__action__: RequestESBackupBackgroundTask, - # FindDiscontinuedSoonBackgroundTask.__action__: FindDiscontinuedSoonBackgroundTask, - HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, - IngestArticlesBackgroundTask.__action__: IngestArticlesBackgroundTask, - JournalBulkDeleteBackgroundTask.__action__: JournalBulkDeleteBackgroundTask, - JournalBulkEditBackgroundTask.__action__: JournalBulkEditBackgroundTask, - JournalCSVBackgroundTask.__action__: JournalCSVBackgroundTask, - SetInDOAJBackgroundTask.__action__: SetInDOAJBackgroundTask, - PreservationBackgroundTask.__action__:PreservationBackgroundTask, - PruneESBackupsBackgroundTask.__action__: PruneESBackupsBackgroundTask, - PublicDataDumpBackgroundTask.__action__: PublicDataDumpBackgroundTask, - ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, - ReportingBackgroundTask.__action__: ReportingBackgroundTask, - 
SitemapBackgroundTask.__action__: SitemapBackgroundTask, - SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask +if typing.TYPE_CHECKING: + from portality.models import BackgroundJob + from portality.bll.services.huey_job import HueyJobData + + +def get_action_handler(action): + from portality.background import BackgroundTask + from portality.tasks.anon_export import AnonExportBackgroundTask + from portality.tasks.article_bulk_delete import ArticleBulkDeleteBackgroundTask + from portality.tasks.article_bulk_create import ArticleBulkCreateBackgroundTask + from portality.tasks.article_cleanup_sync import ArticleCleanupSyncBackgroundTask + from portality.tasks.article_duplicate_report import ArticleDuplicateReportBackgroundTask + from portality.tasks.async_workflow_notifications import AsyncWorkflowBackgroundTask + from portality.tasks.check_latest_es_backup import CheckLatestESBackupBackgroundTask + from portality.tasks.request_es_backup import RequestESBackupBackgroundTask + from portality.tasks.find_discontinued_soon import FindDiscontinuedSoonBackgroundTask + from portality.tasks.harvester import HarvesterBackgroundTask + from portality.tasks.ingestarticles import IngestArticlesBackgroundTask + from portality.tasks.journal_bulk_delete import JournalBulkDeleteBackgroundTask + from portality.tasks.journal_bulk_edit import JournalBulkEditBackgroundTask + from portality.tasks.journal_csv import JournalCSVBackgroundTask + from portality.tasks.journal_in_out_doaj import SetInDOAJBackgroundTask + from portality.tasks.preservation import PreservationBackgroundTask + from portality.tasks.prune_es_backups import PruneESBackupsBackgroundTask + from portality.tasks.public_data_dump import PublicDataDumpBackgroundTask + from portality.tasks.read_news import ReadNewsBackgroundTask + from portality.tasks.reporting import ReportingBackgroundTask + from portality.tasks.sitemap import SitemapBackgroundTask + from portality.tasks.suggestion_bulk_edit import SuggestionBulkEditBackgroundTask + + # dict of {task_name: task_class} so we can interact with the jobs + HANDLERS: Dict[str, Type[BackgroundTask]] = { + AnonExportBackgroundTask.__action__: AnonExportBackgroundTask, + ArticleBulkDeleteBackgroundTask.__action__: ArticleBulkDeleteBackgroundTask, + ArticleBulkCreateBackgroundTask.__action__: ArticleBulkCreateBackgroundTask, + ArticleCleanupSyncBackgroundTask.__action__: ArticleCleanupSyncBackgroundTask, + ArticleDuplicateReportBackgroundTask.__action__: ArticleDuplicateReportBackgroundTask, + AsyncWorkflowBackgroundTask.__action__: AsyncWorkflowBackgroundTask, + CheckLatestESBackupBackgroundTask.__action__: CheckLatestESBackupBackgroundTask, + RequestESBackupBackgroundTask.__action__: RequestESBackupBackgroundTask, + FindDiscontinuedSoonBackgroundTask.__action__: FindDiscontinuedSoonBackgroundTask, + HarvesterBackgroundTask.__action__: HarvesterBackgroundTask, + IngestArticlesBackgroundTask.__action__: IngestArticlesBackgroundTask, + JournalBulkDeleteBackgroundTask.__action__: JournalBulkDeleteBackgroundTask, + JournalBulkEditBackgroundTask.__action__: JournalBulkEditBackgroundTask, + JournalCSVBackgroundTask.__action__: JournalCSVBackgroundTask, + SetInDOAJBackgroundTask.__action__: SetInDOAJBackgroundTask, + PreservationBackgroundTask.__action__: PreservationBackgroundTask, + PruneESBackupsBackgroundTask.__action__: PruneESBackupsBackgroundTask, + PublicDataDumpBackgroundTask.__action__: PublicDataDumpBackgroundTask, + ReadNewsBackgroundTask.__action__: ReadNewsBackgroundTask, + 
ReportingBackgroundTask.__action__: ReportingBackgroundTask, + SitemapBackgroundTask.__action__: SitemapBackgroundTask, + SuggestionBulkEditBackgroundTask.__action__: SuggestionBulkEditBackgroundTask + } + return HANDLERS.get(action) + + +def handle_requeue(job: 'BackgroundJob'): + handler = get_action_handler(job.action) + if handler is None: + print('This script is not set up to {0} task type {1}. Skipping.'.format('requeue', job.action)) + return + + job.queue() + handler.submit(job) + + +def handle_cancel(job: 'BackgroundJob'): + print(f'Cancel bgjob from db: [{job.id}]') + job.cancel() + job.save() + + huey_job_serv = DOAJ.hueyJobService() + redis_client = huey_job_serv.create_redis_client() + huey_rows = huey_job_serv.find_queued_huey_jobs(client=redis_client) + huey_rows = (r for r in huey_rows if r.bgjob_id == job.id) + huey_job = next(huey_rows, None) + if huey_job is not None: + print(f'Remove job from redis: [{huey_job.bgjob_id}]') + huey_job_serv.rm_huey_job_from_redis(huey_job, client=redis_client) + redis_client.close() + + +def handle_process(job: 'BackgroundJob'): + from portality.background import BackgroundApi + handler = get_action_handler(job.action) + if handler is None: + print('This script is not set up to {0} task type {1}. Skipping.'.format('process', job.action)) + return + + task = handler(job) # Just execute immediately without going through huey + BackgroundApi.execute(task) + + +cmd_handlers = { + 'requeue': handle_requeue, + 'cancel': handle_cancel, + 'process': handle_process, } -def manage_jobs(verb, action, status, from_date, to_date, prompt=True): - q = JobsQuery(action, status, from_date, to_date) +def manage_jobs(verb, action, status, from_date, to_date, job_id, prompt=True, size=10000): + from portality import models + if job_id is not None: + j = models.BackgroundJob.pull(job_id) + jobs: List[models.BackgroundJob] = [] if j is None else [j] + query_ref = job_id + else: + q = JobsQuery(action, status, from_date, to_date, size=size) + jobs: List[models.BackgroundJob] = models.BackgroundJob.q2obj(q=q.query()) + query_ref = json.dumps(q.query(), indent=4) - jobs = models.BackgroundJob.q2obj(q=q.query()) + if len(jobs) == 0: + print('No jobs found by query: ') + print(query_ref) + return print('You are about to {verb} {count} job(s)'.format(verb=verb, count=len(jobs))) + print('The first 5 are:') + for j in jobs[:5]: + print(job_to_str(j)) - doit = "y" - if prompt: - doit = input('Proceed? [y\\N] ') - - if doit.lower() == 'y': - print('Please wait...') - for job in jobs: - if job.action not in HANDLERS: - print('This script is not set up to {0} task type {1}. Skipping.'.format(verb, job.action)) - continue - - job.add_audit_message("Job {pp} from job management script.".format( - pp={'requeue': 'requeued', 'cancel': 'cancelled', "process": "processed"}[verb])) - - if verb == 'requeue': # Re-queue and execute immediately - job.queue() - HANDLERS[job.action].submit(job) - elif verb == 'cancel': # Just apply cancelled status - job.cancel() - job.save() - elif verb == 'process': - task = HANDLERS[job.action](job) # Just execute immediately without going through huey - BackgroundApi.execute(task) - - print('done.') - else: + if prompt and input('Proceed? 
[y\\N] ').lower() != 'y': print('No action.') + return + job_handler = cmd_handlers.get(verb) + if job_handler is None: + print(f'Unknown verb: {verb}') + return -def requeue_jobs(action, status, from_date, to_date, prompt=True): - manage_jobs('requeue', action, status, from_date, to_date, prompt=prompt) - - -def cancel_jobs(action, status, from_date, to_date, prompt=True): - manage_jobs('cancel', action, status, from_date, to_date, prompt=prompt) + print('Please wait...') + for job in jobs: + job.add_audit_message("Job [{pp}] from job management script.".format( + pp={'requeue': 'requeued', 'cancel': 'cancelled', "process": "processed"}[verb])) + job_handler(job) - -def process_jobs(action, status, from_date, to_date, prompt=True): - manage_jobs("process", action, status, from_date, to_date, prompt=prompt) + print('done.') class JobsQuery(object): - def __init__(self, action, status, from_date, to_date): + def __init__(self, action, status, from_date, to_date, size=10000): self.action = action self.status = status self.from_date = from_date self.to_date = to_date + self.size = size def query(self): q = { @@ -138,7 +179,7 @@ def query(self): ] } }, - "size": 10000 + "size": self.size } if self.action is not None: @@ -147,40 +188,308 @@ def query(self): return q -if __name__ == '__main__': - import argparse - parser = argparse.ArgumentParser() - - parser.add_argument('-r', '--requeue', - help='Add these jobs back on the job queue for processing', action='store_true') - parser.add_argument('-c', '--cancel', - help='Cancel these jobs (set their status to "cancelled")', action='store_true') - parser.add_argument("-p", "--process", - help="Immediately process these jobs on the command line", action="store_true") - parser.add_argument('-s', '--status', - help='Filter for job status. Default is "queued"', - default='queued') - parser.add_argument('-a', '--action', - help='Background job action. 
Leave empty for all actions (not recommended)', - default=None) - parser.add_argument('-f', '--from_date', - help='Date from which to look for jobs in the given type and status', - default=DEFAULT_TIMESTAMP_VAL) - parser.add_argument('-t', '--to_date', - help='Date to which to look for jobs in the given type and status', - default=dates.now_str()) +def find_bgjobs_by_status(status) -> List['BackgroundJob']: + from portality import models + bgjobs = models.BackgroundJob.q2obj( + q=models.background.BackgroundJobQueryBuilder() + .status_includes([status]) + .size(10000) + .build_query_dict()) + return bgjobs + + +def job_to_str(job: 'BackgroundJob'): + return f'{job.action:30} {job.id} {job.status:10} {job.created_date}' + + +def print_huey_jobs_counter(counter): + for k, v in sorted(counter.items(), key=lambda x: x[0]): + print(f'{k:30} {v}') + + +def print_huey_job_data(job_data: 'HueyJobData'): + print(f'{job_data.bgjob_action:30} {job_data.bgjob_id} {dates.format(job_data.schedule_time)}') + + +def print_job_delta(title_val, id_action_list: typing.Iterable[tuple]): + id_action_list = sorted(id_action_list, key=lambda x: x[1]) + print(title(f'{title_val} ({len(id_action_list)})', background=color_text.Color.red)) + for id, action in id_action_list: + print(f'{action:30} {id}') + print() + + +def total_counter(counter): + return sum(counter.values()) + + +def title(s, background=color_text.Color.green): + return color_text.apply_color(s, front=color_text.Color.black, background=background) + + +def report(example_size=10): + huey_rows = list(DOAJ.hueyJobService().find_all_huey_jobs()) + + scheduled = Counter(r.bgjob_action for r in huey_rows if r.is_scheduled) + unscheduled = Counter(r.bgjob_action for r in huey_rows if not r.is_scheduled) + + print(title('# Huey jobs:')) + print(title(f'### Scheduled ({total_counter(scheduled)})')) + print_huey_jobs_counter(scheduled) + print() + + print(title(f'### Unscheduled ({total_counter(unscheduled)})')) + print_huey_jobs_counter(unscheduled) + print() + + unscheduled_huey_jobs = sorted((r for r in huey_rows if not r.is_scheduled), + key=lambda x: x.schedule_time, reverse=True) + display_size = min(example_size, len(unscheduled_huey_jobs)) + print(title(f'### last {display_size} unscheduled jobs:')) + for r in unscheduled_huey_jobs[:display_size]: + print_huey_job_data(r) + print() + + counter = Counter(r.bgjob_id for r in huey_rows if not r.is_scheduled) + counter = {k: v for k, v in counter.items() if v > 1} + if counter: + print(title(f'### Duplicated jobs ({total_counter(counter)})')) + for k, v in counter.items(): + print(f'{k} {v}') + + print_huey_jobs_counter(counter) + + print(title('# DB records')) + print(title(f'### Processing')) + for j in find_bgjobs_by_status(constants.BGJOB_STATUS_PROCESSING): + print(job_to_str(j)) + print() + + bgjobs: List[BackgroundJob] = find_bgjobs_by_status(BGJOB_STATUS_QUEUED) + counter = Counter((j.action, j.status) for j in bgjobs) + print(title(f'### Queued ({sum(counter.values())})')) + for k, v in sorted(counter.items(), key=lambda x: x[0]): + print(f'{k[0]:30} {k[1]:10} {v}') + print() + + display_size = min(example_size, len(bgjobs)) + print(title(f'### last {display_size} jobs')) + for j in sorted(bgjobs, key=lambda j: j.created_date, reverse=True)[:display_size]: + print(job_to_str(j)) + print() + + print(title('# Queued delta between DB and redis:', background=color_text.Color.red)) + huey_only, db_only = find_huey_bgjob_delta(unscheduled_huey_jobs, bgjobs) + print_job_delta('### DB only', ((j.id, 
j.action) for j in db_only)) + print_job_delta('### Redis only', ((i.bgjob_id, i.bgjob_action) for i in huey_only)) + + +def rm_all(): + if input(color_text.apply_color( + 'WARNING: This will delete all jobs from redis and the database. Proceed? [y\\N] ', + background=color_text.Color.red)).lower() != 'y': + print('No action.') + return + from portality import models + from portality.bll.services.huey_job import HUEY_REDIS_DOAJMAINQUEUE, HUEY_REDIS_DOAJLONGRUNNING + + print('Remove all jobs from DB') + models.BackgroundJob.delete_by_query(es_queries.query_all()) + + print('Remove all jobs from redis') + client = DOAJ.hueyJobService().create_redis_client() + client.delete(HUEY_REDIS_DOAJMAINQUEUE) + client.delete(HUEY_REDIS_DOAJLONGRUNNING) + client.close() + + +def rm_old_processing(is_all=False): + from portality import models + from portality.constants import BGJOB_STATUS_PROCESSING + from portality.models.background import BackgroundJobQueryBuilder + + bgjobs = models.BackgroundJob.q2obj(q=BackgroundJobQueryBuilder() + .status_includes([BGJOB_STATUS_PROCESSING]) + .order_by('created_date', 'asc') + .size(10000) + .build_query_dict()) + + if not is_all: + bgjobs = bgjobs[:-1] + + if bgjobs: + print(f'Following {len(bgjobs)} jobs will be removed:') + for j in bgjobs: + print(job_to_str(j)) + models.BackgroundJob.bulk_delete(b.id for b in bgjobs) + + +def rm_redundant(): + from portality.core import app + from portality import models + + target_actions = app.config.get('BGJOB_MANAGE_REDUNDANT_ACTIONS', []) + print(f'Following actions will be cleaned up for redundant jobs: {target_actions}') + huey_job_service = DOAJ.hueyJobService() + client = huey_job_service.create_redis_client() + huey_rows = list(huey_job_service.find_queued_huey_jobs(client=client)) + for action in target_actions: + bgjobs = models.BackgroundJob.q2obj(q=models.background.BackgroundJobQueryBuilder() + .status_includes([BGJOB_STATUS_QUEUED]) + .action(action) + .order_by('created_date', 'asc') + .size(10000) + .build_query_dict()) + bgjobs = bgjobs[:-1] + if not bgjobs: + continue + + print(f'Remove redundant jobs: [{action}][{len(bgjobs)}]') + # remove from db + models.BackgroundJob.bulk_delete(b.id for b in bgjobs) + + # remove from redis + for j in bgjobs: + for h in iter(huey_rows): + if h.bgjob_id == j.id: + huey_job_service.rm_huey_job_from_redis(h, client=client) + huey_rows.remove(h) + break + client.close() + + +def find_huey_bgjob_delta(huey_rows: List['HueyJobData'] = None, + bgjobs: List['BackgroundJob'] = None + ) -> (List['HueyJobData'], List['BackgroundJob']): + huey_rows: List['HueyJobData'] = (list(DOAJ.hueyJobService().find_queued_huey_jobs()) + if huey_rows is None else list(huey_rows)) + bgjobs: List[BackgroundJob] = (find_bgjobs_by_status(BGJOB_STATUS_QUEUED) + if bgjobs is None else list(bgjobs)) + + huey_ids = {i.bgjob_id for i in huey_rows} + db_ids = {j.id for j in bgjobs} + + huey_only = [i for i in huey_rows if i.bgjob_id not in db_ids] + db_only = [j for j in bgjobs if j.id not in huey_ids] + return huey_only, db_only + + +def rm_async_queued_jobs(): + huey_only, db_only = find_huey_bgjob_delta() + print('Remove async queued jobs') + print_job_delta('### DB only', ((j.id, j.action) for j in db_only)) + if db_only: + from portality import models + models.BackgroundJob.bulk_delete(d.id for d in db_only) + + print_job_delta('### Redis only', ((i.bgjob_id, i.bgjob_action) for i in huey_only)) + huey_job_serv = DOAJ.hueyJobService() + client = huey_job_serv.create_redis_client() + for i in 
+def rm_async_queued_jobs():
+    huey_only, db_only = find_huey_bgjob_delta()
+    print('Remove async queued jobs')
+    print_job_delta('### DB only', ((j.id, j.action) for j in db_only))
+    if db_only:
+        from portality import models
+        models.BackgroundJob.bulk_delete(d.id for d in db_only)
+
+    print_job_delta('### Redis only', ((i.bgjob_id, i.bgjob_action) for i in huey_only))
+    huey_job_serv = DOAJ.hueyJobService()
+    client = huey_job_serv.create_redis_client()
+    for i in huey_only:
+        huey_job_serv.rm_huey_job_from_redis(i, client=client)
+    client.close()
+
+
+def cleanup():
+    rm_redundant()
+    rm_old_processing()
+
+    time.sleep(5)  # wait for the DB deletes to take effect before re-querying
+    rm_async_queued_jobs()
+
+
+def add_arguments_yes(parser: argparse.ArgumentParser):
     parser.add_argument("-y", "--yes", help="Answer yes to all prompts", action="store_true")
+
+
+def add_arguments_common(parser: argparse.ArgumentParser):
+    # common options
+    add_arguments_yes(parser)
+
+    # query options
+    query_options = parser.add_argument_group('Query options')
+    query_options.add_argument('-s', '--status',
+                               help='Filter for job status. Default is "queued"',
+                               default='queued')
+    query_options.add_argument('-a', '--action',
+                               help='Background job action. Leave empty for all actions (not recommended)',
+                               default=None)
+    query_options.add_argument('-f', '--from_date',
+                               help='Date from which to look for jobs in the given type and status',
+                               default=DEFAULT_TIMESTAMP_VAL)
+    query_options.add_argument('-t', '--to_date',
+                               help='Date to which to look for jobs in the given type and status',
+                               default=dates.now_str())
+    query_options.add_argument('-i', '--id',
+                               help='Background job id',
+                               default=None)
+
+    default_size = 10000
+    query_options.add_argument('--size',
+                               help=f'Number of jobs to process at a time, default is {default_size}',
+                               type=int, default=default_size)
+
+
+def main():
+    epilog = """
+Examples
+--------
+
+### Requeue all 'read_news' jobs from 1970-01-01 to 2020-01-01
+manage-bgjobs requeue -a read_news -f 1970-01-01T00:00:00Z -t 2020-01-01T00:00:00Z
+
+### Cancel the job with the given id, without a confirmation prompt
+manage-bgjobs cancel -i fd1c22fac3844ad9a8e163c7d39306f4 -y
+
+### Clean up outdated jobs
+manage-bgjobs cleanup
+    """
+
+    parser = argparse.ArgumentParser(description='Manage background jobs in the DOAJ database and redis',
+                                     epilog=epilog,
+                                     formatter_class=argparse.RawDescriptionHelpFormatter)
+    sp = parser.add_subparsers(dest='cmdname', help='Actions')
+
+    sp_p = sp.add_parser('requeue', help='Add these jobs back on the job queue for processing')
+    add_arguments_common(sp_p)
+
+    sp_p = sp.add_parser('cancel', help='Cancel bgjobs (set their status to "cancelled") and remove them from redis')
+    add_arguments_common(sp_p)
+
+    sp_p = sp.add_parser('process', help='Immediately process these jobs on the command line')
+    add_arguments_common(sp_p)
+
+    sp_p = sp.add_parser('report', help='Report the status of jobs, e.g. out-of-sync jobs between redis and the DB')
+
+    sp_p = sp.add_parser('rm-all', help='Remove all jobs from redis and the DB')
+
+    sp_p = sp.add_parser('cleanup',
+                         help=('Remove all outdated / out-of-sync records, including old processing jobs, '
+                               'redundant queued jobs and orphaned queued jobs'))
     args = parser.parse_args()
 
-    if args.requeue and args.cancel:
-        print('Use only --requeue OR --cancel, not both.')
-        exit(1)
-    elif args.requeue:
-        requeue_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True)
-    elif args.cancel:
-        cancel_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True)
-    elif args.process:
-        process_jobs(args.action, args.status, args.from_date, args.to_date, prompt=False if args.yes else True)
+    cmdname = args.__dict__.get('cmdname')
+    if cmdname in ['requeue', 'cancel', 'process']:
+        manage_jobs(cmdname,
+                    args.action, args.status,
+                    args.from_date, args.to_date,
+                    job_id=args.id,
+                    prompt=not args.yes)
+    elif cmdname == 'report':
+        report()
+    elif cmdname == 'rm-all':
+        rm_all()
+    elif cmdname == 'cleanup':
+        cleanup()
     else:
-        print('You must supply one of --requeue, --cancel or --process to run this script')
+        parser.print_help()
         exit(1)
+
+
+if __name__ == '__main__':
+    main()
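For clarity on what `rm_redundant` (and the `BGJOB_MANAGE_REDUNDANT_ACTIONS` setting further down in this diff) treats as redundant: for each configured action, every queued job except the most recently created one is dropped, since a newer queued run of e.g. `journal_csv` supersedes the older ones. A minimal illustration of that slice rule on toy data:

```python
# Toy data, not the DOAJ models: (job_id, created_date), sorted ascending
# as in rm_redundant()'s order_by('created_date', 'asc').
queued = [('a1', '2024-01-01'), ('a2', '2024-01-02'), ('a3', '2024-01-03')]

redundant = queued[:-1]   # same slice as rm_redundant(): keep only the newest job
assert [j[0] for j in redundant] == ['a1', 'a2']
```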
diff --git a/portality/scripts/priorities.csv b/portality/scripts/priorities.csv
index 75c95cf037..f5e5cdcc59 100644
--- a/portality/scripts/priorities.csv
+++ b/portality/scripts/priorities.csv
@@ -1,7 +1,7 @@
 id,labels,columns
 HP/DaR,"Priority: High, Type: Data at Risk",
 HP/bug,"Priority: High, bug",
-Deadline,Priority: Deadline,
+Deadline,Priority: Deadline,"Review, In progress, To Do"
 HP/PfL,"Prioroty: High, Workflow: Pending for Live",Review
 HP/sup,"Priority: High, Origin: Support",
 Test1,Workflow: On Test,Review
@@ -10,6 +10,7 @@ HP/PfT,"Priority: High, Workflow: Pending for Test",Review
 HP/rev,Priority: High,Review
 PfT,Workflow: Pending for Test,Review
 PfL,Workflow: Pending for Live,Review
+Inv,Workflow: Initial Investigation,"Review, In Progress, To Do"
 Rev,,Review
 Near,Scale: Nearly Finished,
 Sch,Priority: Scheduled,
diff --git a/portality/settings.py b/portality/settings.py
index 744e4b1108..1ad46e8720 100644
--- a/portality/settings.py
+++ b/portality/settings.py
@@ -9,8 +9,8 @@
 # Application Version information
 # ~~->API:Feature~~
-DOAJ_VERSION = "6.6.10"
-API_VERSION = "3.0.1"
+DOAJ_VERSION = "6.8.5"
+API_VERSION = "4.0.0"
 
 ######################################
 # Deployment configuration
@@ -67,13 +67,13 @@
 ELASTIC_SEARCH_TEST_DB_PREFIX = "doajtest-"
 
 INITIALISE_INDEX = True  # whether or not to try creating the index and required index types on startup
-ELASTIC_SEARCH_VERSION = "1.7.5"
+ELASTIC_SEARCH_VERSION = "7.10.2"
 ELASTIC_SEARCH_SNAPSHOT_REPOSITORY = None
 ELASTIC_SEARCH_SNAPSHOT_TTL = 366
 
 ES_TERMS_LIMIT = 1024
-
-ES_READ_TIMEOUT = '2m'
+ELASTICSEARCH_REQ_TIMEOUT = 20  # Seconds - used in core.py for the whole ES connection request timeout
+ES_READ_TIMEOUT = '2m'  # Minutes - used in the DAO for searches
 
 #####################################################
 # Elastic APM config (MUST be configured in env file)
@@ -96,14 +96,8 @@
 ###########################################
 # Event handler
-# use this to queue events asynchronously through kafka
-EVENT_SEND_FUNCTION = "portality.events.kafka_producer.send_event"
-# use this one to bypass kafka and process events immediately/synchronously
-# EVENT_SEND_FUNCTION = "portality.events.shortcircuit.send_event"
-
-KAFKA_BROKER = "kafka://localhost:9092"
-KAFKA_EVENTS_TOPIC = 
"events" -KAFKA_BOOTSTRAP_SERVER = "localhost:9092" +# Process events immediately/synchronously +EVENT_SEND_FUNCTION = "portality.events.shortcircuit.send_event" ########################################### # Read Only Mode @@ -126,8 +120,8 @@ # List the features we want to be active (API v1 and v2 remain with redirects to v3) # ~~->API:Feature~~ -FEATURES = ['api1', 'api2', 'api3'] -VALID_FEATURES = ['api1', 'api2', 'api3'] +FEATURES = ['api1', 'api2', 'api3', 'api4'] +VALID_FEATURES = ['api1', 'api2', 'api3', 'api4'] ######################################## # File Path and URL Path settings @@ -148,7 +142,8 @@ # ~~->API:Feature~~ BASE_API_URL = "https://doaj.org/api/" -API_CURRENT_BLUEPRINT_NAME = "api_v3" # change if upgrading API to new version and creating new view +API_CURRENT_BLUEPRINT_NAME = "api_v4" # change if upgrading API to new version and creating new view +CURRENT_API_MAJOR_VERSION = "4" # URL used for the journal ToC URL in the journal CSV export # NOTE: must be the correct route as configured in view/doaj.py @@ -165,6 +160,7 @@ # directory to upload files to. MUST be full absolute path # The default takes the directory above this, and then down in to "upload" UPLOAD_DIR = os.path.join(ROOT_DIR, "upload") +UPLOAD_ASYNC_DIR = os.path.join(ROOT_DIR, "upload_async") FAILED_ARTICLE_DIR = os.path.join(ROOT_DIR, "failed_articles") # directory where reports are output @@ -420,8 +416,8 @@ # ~~->BackgroundTasks:Feature~~ # huey/redis settings -HUEY_REDIS_HOST = os.getenv('HUEY_REDIS_HOST', '127.0.0.1') -HUEY_REDIS_PORT = os.getenv('HUEY_REDIS_PORT', 6379) +REDIS_HOST = os.getenv('REDIS_HOST', '127.0.0.1') +REDIS_PORT = os.getenv('REDIS_PORT', 6379) HUEY_EAGER = False # Crontab for never running a job - February 31st (use to disable tasks) @@ -441,15 +437,20 @@ "request_es_backup": {"month": "*", "day": "*", "day_of_week": "*", "hour": "6", "minute": "0"}, "check_latest_es_backup": {"month": "*", "day": "*", "day_of_week": "*", "hour": "9", "minute": "0"}, "prune_es_backups": {"month": "*", "day": "*", "day_of_week": "*", "hour": "9", "minute": "15"}, - "public_data_dump": {"month": "*", "day": "*/6", "day_of_week": "*", "hour": "10", "minute": "0"}, + "public_data_dump": {"month": "*", "day": "*", "day_of_week": "*", "hour": "10", "minute": "0"}, "harvest": {"month": "*", "day": "*", "day_of_week": "*", "hour": "5", "minute": "30"}, "anon_export": {"month": "*", "day": "10", "day_of_week": "*", "hour": "6", "minute": "30"}, "old_data_cleanup": {"month": "*", "day": "12", "day_of_week": "*", "hour": "6", "minute": "30"}, "monitor_bgjobs": {"month": "*", "day": "*/6", "day_of_week": "*", "hour": "10", "minute": "0"}, "find_discontinued_soon": {"month": "*", "day": "*", "day_of_week": "*", "hour": "0", "minute": "3"}, "datalog_journal_added_update": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "*/30"}, + "article_bulk_create": {"month": "*", "day": "*", "day_of_week": "*", "hour": "*", "minute": "20"}, } +# Standard schedule for PDD (#3970) +# "public_data_dump": {"month": "*", "day": "*/6", "day_of_week": "*", "hour": "10", "minute": "0"}, + + HUEY_TASKS = { "ingest_articles": {"retries": 10, "retry_delay": 15}, "preserve": {"retries": 0, "retry_delay": 15}, @@ -701,6 +702,7 @@ MAPPINGS['article'] = MAPPINGS["account"] #~~->Article:Model~~ MAPPINGS['upload'] = MAPPINGS["account"] #~~->Upload:Model~~ +MAPPINGS['bulk_articles'] = MAPPINGS["account"] #~~->BulkArticles:Model~~ MAPPINGS['cache'] = MAPPINGS["account"] #~~->Cache:Model~~ MAPPINGS['lcc'] = 
MAPPINGS["account"] #~~->LCC:Model~~ MAPPINGS['editor_group'] = MAPPINGS["account"] #~~->EditorGroup:Model~~ @@ -1130,7 +1132,7 @@ BLOG_FEED_URL = "http://doajournals.wordpress.com/feed/atom/" -FRONT_PAGE_NEWS_ITEMS = 6 +FRONT_PAGE_NEWS_ITEMS = 4 NEWS_PAGE_NEWS_ITEMS = 20 @@ -1286,6 +1288,7 @@ 'bulk_application_create': 'Bulk application create', 'bulk_application_delete': 'Bulk application delete', 'bulk_article_create': 'Bulk article create', + 'bulk_article_create_status': 'Bulk article create status', 'bulk_article_delete': 'Bulk article delete' } @@ -1337,7 +1340,7 @@ ## EPMC Client configuration # ~~-> EPMC:ExternalService~~ EPMC_REST_API = "https://www.ebi.ac.uk/europepmc/webservices/rest/" -EPMC_TARGET_VERSION = "6.6" # doc here: https://europepmc.org/docs/Europe_PMC_RESTful_Release_Notes.pdf +EPMC_TARGET_VERSION = "6.9" # doc here: https://europepmc.org/docs/Europe_PMC_RESTful_Release_Notes.pdf EPMC_HARVESTER_THROTTLE = 0.2 # General harvester configuration @@ -1409,6 +1412,12 @@ 'long_running': 93600, # 26 hours } +# Default monitoring config for background job types which are not enumerated in BG_MONITOR_ERRORS_CONFIG below +BG_MONITOR_DEFAULT_CONFIG = { + 'total': 2, + 'oldest': 1200, +} + # Configures the monitoring period and the allowed number of errors in that period before a queue is marked # as unstable BG_MONITOR_ERRORS_CONFIG = { @@ -1517,8 +1526,10 @@ SELENIUM_DOAJ_HOST = '172.17.0.1' SELENIUM_DOAJ_PORT = 5014 +################################################# +# Concurrency timeout(s) - +UR_CONCURRENCY_TIMEOUT = 10 ############################################# @@ -1530,7 +1541,6 @@ GOOGLE_KEY_PATH = '' - ############################################# # Datalog # ~~->Datalog:Feature~~ @@ -1551,3 +1561,12 @@ AUTOCHECK_RESOURCE_ISSN_ORG_TIMEOUT = 10 AUTOCHECK_RESOURCE_ISSN_ORG_THROTTLE = 1 # seconds between requests + + +################################################## +# Background jobs Management settings + +# list of actions name that will be cleaned up if they are redundant +BGJOB_MANAGE_REDUNDANT_ACTIONS = [ + 'read_news', 'journal_csv' +] diff --git a/portality/static/crossref/JATS-journalpublishing1.xsd b/portality/static/crossref/JATS-journalpublishing1.xsd index 47eb75235f..063f7ab2e3 100644 --- a/portality/static/crossref/JATS-journalpublishing1.xsd +++ b/portality/static/crossref/JATS-journalpublishing1.xsd @@ -3,7 +3,7 @@ targetNamespace="http://www.ncbi.nlm.nih.gov/JATS1" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xlink="http://www.w3.org/1999/xlink" elementFormDefault="qualified"> - + diff --git a/portality/static/crossref/xlink.xsd b/portality/static/crossref/xlink.xsd new file mode 100644 index 0000000000..f6e909e0c1 --- /dev/null +++ b/portality/static/crossref/xlink.xsd @@ -0,0 +1,270 @@ + + + + + This schema document provides attribute declarations and +attribute group, complex type and simple type definitions which can be used in +the construction of user schemas to define the structure of particular linking +constructs, e.g. + + + + + + + ... + + ... + + + ... +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Intended for use as the type of user-declared elements to make them + simple links. 
+ + + + + + + + + + + + + + + + + + + + + + + + + Intended for use as the type of user-declared elements to make them + extended links. + Note that the elements referenced in the content model are all abstract. + The intention is that by simply declaring elements with these as their + substitutionGroup, all the right things will happen. + + + + + + + + + + + + + + xml:lang is not required, but provides much of the + motivation for title elements in addition to attributes, and so + is provided here for convenience. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + label is not required, but locators have no particular + XLink function if they are not labeled. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + from and to have default behavior when values are missing + + + + + + + + + + + + + + + + + diff --git a/portality/static/crossref/xml.xsd b/portality/static/crossref/xml.xsd new file mode 100644 index 0000000000..aea7d0db0a --- /dev/null +++ b/portality/static/crossref/xml.xsd @@ -0,0 +1,287 @@ + + + + + + +
+  About the XML namespace
+
+  This schema document describes the XML namespace, in a form suitable for import by
+  other schema documents. See http://www.w3.org/XML/1998/namespace.html and
+  http://www.w3.org/TR/REC-xml for information about this namespace.
+
+  Note that local names in this namespace are intended to be defined only by the
+  World Wide Web Consortium or its subgroups. The names currently defined in this
+  namespace are listed below. They should not be used with conflicting semantics by
+  any Working Group, specification, or document instance.
+
+  lang (as an attribute name): denotes an attribute whose value is a language code
+  for the natural language of the content of any element; its value is inherited.
+  This name is reserved by virtue of its definition in the XML specification.
+  Attempting to install the relevant ISO 2- and 3-letter codes as the enumerated
+  possible values is probably never going to be a realistic possibility. See BCP 47
+  at http://www.rfc-editor.org/rfc/bcp/bcp47.txt and the IANA language subtag
+  registry at http://www.iana.org/assignments/language-subtag-registry for further
+  information. The union allows for the 'un-declaration' of xml:lang with the
+  empty string.
+
+  space (as an attribute name): denotes an attribute whose value is a keyword
+  indicating what whitespace processing discipline is intended for the content of
+  the element; its value is inherited. This name is reserved by virtue of its
+  definition in the XML specification.
+
+  base (as an attribute name): denotes an attribute whose value provides a URI to
+  be used as the base for interpreting any relative URIs in the scope of the
+  element on which it appears; its value is inherited. This name is reserved by
+  virtue of its definition in the XML Base specification. See
+  http://www.w3.org/TR/xmlbase/ for information about this attribute.
+
+  id (as an attribute name): denotes an attribute whose value should be interpreted
+  as if declared to be of type ID. This name is reserved by virtue of its
+  definition in the xml:id specification. See http://www.w3.org/TR/xml-id/ for
+  information about this attribute.
+
+  Father (in any context at all): denotes Jon Bosak, the chair of the original XML
+  Working Group. This name is reserved by the following decision of the W3C XML
+  Plenary and XML Coordination groups: "In appreciation for his vision, leadership
+  and dedication the W3C XML Plenary on this 10th day of February, 2000, reserves
+  for Jon Bosak in perpetuity the XML name xml:Father."
+
+  About this schema document
+
+  This schema defines attributes and an attribute group suitable for use by schemas
+  wishing to allow xml:base, xml:lang, xml:space or xml:id attributes on elements
+  they define. To enable this, such a schema must import this schema for the XML
+  namespace, e.g. as follows:
+
+          <schema . . .>
+           . . .
+           <import namespace="http://www.w3.org/XML/1998/namespace"
+                      schemaLocation="http://www.w3.org/2001/xml.xsd"/>
+
+  or
+
+           <import namespace="http://www.w3.org/XML/1998/namespace"
+                      schemaLocation="http://www.w3.org/2009/01/xml.xsd"/>
+
+  Subsequently, qualified reference to any of the attributes or the group defined
+  below will have the desired effect, e.g.
+
+          <type . . .>
+           . . .
+           <attributeGroup ref="xml:specialAttrs"/>
+
+  will define a type which will schema-validate an instance element with any of
+  those attributes.
+
+  Versioning policy for this schema document
+
+  In keeping with the XML Schema WG's standard versioning policy, this schema
+  document will persist at http://www.w3.org/2009/01/xml.xsd. At the date of issue
+  it can also be found at http://www.w3.org/2001/xml.xsd. The schema document at
+  that URI may however change in the future, in order to remain compatible with the
+  latest version of XML Schema itself, or with the XML namespace itself. In other
+  words, if the XML Schema or XML namespaces change, the version of this document
+  at http://www.w3.org/2001/xml.xsd will change accordingly; the version at
+  http://www.w3.org/2009/01/xml.xsd will not change. Previous dated (and
+  unchanging) versions of this schema document are also archived.
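The two schema files added above (xlink.xsd and xml.xsd) exist so that JATS-journalpublishing1.xsd can resolve its imports locally instead of fetching them from w3.org at validation time. A minimal sketch of the resulting behaviour, assuming the schemas are compiled with lxml (the module usage and article path here are illustrative, not the DOAJ code):

```python
from lxml import etree

# Schema path taken from this diff; relative xs:import schemaLocations
# (e.g. "xlink.xsd", "xml.xsd") are resolved against the importing file's
# own directory, so the vendored copies are used and no network fetch occurs.
schema = etree.XMLSchema(etree.parse("portality/static/crossref/JATS-journalpublishing1.xsd"))

doc = etree.parse("article.xml")   # hypothetical JATS article file
print(schema.validate(doc))        # True if the article validates
```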
+ diff --git a/portality/static/doaj/css/doaj_api.css b/portality/static/doaj/css/doaj_api.css index 40a5cbc365..5e7cd7a19e 100644 --- a/portality/static/doaj/css/doaj_api.css +++ b/portality/static/doaj/css/doaj_api.css @@ -1,36 +1,96 @@ /**************************************** * API Docs Styles */ +.swagger-section { + font-size: 0.7rem; +} + +.swagger-ui .wrapper { + padding: 0; +} + +.swagger-ui .title pre { + background: initial +} -.swagger-section .heading h3 { - font-family: 'Source Code Pro', monospace; - font-weight: 400; +.swagger-ui code { + background: #333; } -.swagger-section .swagger-ui-wrap { - max-width: inherit; +.swagger-ui .information-container { + display: none; } -.swagger-section .swagger-ui-wrap ul#resources { +.swagger-ui .opblock-tag { padding-left: 0; + font-size: 1.424em; + color: #282624; +} + +.swagger-ui .opblock-tag.no-desc span { + font-family: 'Source Sans Pro', sans-serif; + font-size: 1.424em; + line-height: 1.05; + font-weight: 400; +} + +.swagger-ui .opblock.opblock-delete { + background-color: #FD5A3B10; /* $grapefruit with opacity 0.1 */ + border-color: #FD5A3B; /* $grapefruit with opacity 1 */ + border-radius: 0; + color: #282624; /* warm black */ +} + +.swagger-ui .opblock.opblock-delete .opblock-summary-method { + border-radius: 0; + background-color: #FD5A3B; /* $grapefruit */ + color: #FFF; +} + +.swagger-ui .opblock.opblock-post { + background-color: #47A17810; /* $mid-green with opacity 0.1 */ + border-color: #47A178; /* $mid-green with opacity 1 */ + border-radius: 0; + color: #282624; /* warm black */ +} + +.swagger-ui .opblock.opblock-post .opblock-summary-method { + border-radius: 0; + background-color: #47A178; /* $grapefruit */ + color: #FFF; +} + +.swagger-ui .opblock.opblock-get { + background-color: #5C595610; /* $dark-grey with opacity 0.1 */ + border-color: #5C5956; /* $grapefruit with opacity 1 */ + border-radius: 0; + color: #282624; /* warm black */ +} + +.swagger-ui .opblock.opblock-get .opblock-summary-method { + border-radius: 0; + background-color: #5C5956; /* $dark-grey */ + color: #FFF; } -.swagger-section h4 { - font-family: 'Source Sans Pro', sans-serif; - letter-spacing: 0.01em; +.swagger-ui .opblock.opblock-put { + background-color: #F9D95010; /* $yellow with opacity 0.1 */ + border-color: #F9D950; /* $yellow with opacity 1 */ + border-radius: 0; + color: #282624; /* warm black */ } -ul, -ol { - list-style: initial; - padding-left: 0 !important; - margin-left: 0 !important; +.swagger-ui .opblock.opblock-put .opblock-summary-method { + border-radius: 0; + background-color: #F9D950; /* $yellow */ + color: #282624; /* warm black */ } -.swagger-section .swagger-ui-wrap .footer { - visibility: hidden; +.swagger-section .swagger-ui-wrap table tbody tr td { + width: auto; } -.swagger-section a { - width: 4rem !important; +.swagger-section .swagger-ui-wrap table thead tr th { + width: auto !important; + max-width: none !important; } diff --git a/portality/static/js/application_form.js b/portality/static/js/application_form.js index 54fd313368..b3c646dfac 100644 --- a/portality/static/js/application_form.js +++ b/portality/static/js/application_form.js @@ -905,6 +905,29 @@ window.Parsley.addValidator("onlyIf", { priority: 1 }); +window.Parsley.addValidator("onlyIfExists", { + validateString: function (values, requirement, parsleyInstance) { + let fields = requirement.split(","); + for (var i = 0; i < fields.length; i++) { + let field = fields[i]; + let other = $("[name=" + field + "]"); + let type = other.attr("type"); + if 
(type === "checkbox" || type === "radio") { + let checked = other.filter(":checked"); + if (checked.length === 0) { + return false; + } + return true; + } + return !!other.val() + } + }, + messages: { + en: 'This only can be set when requirements are met' + }, + priority: 1 +}); + window.Parsley.addValidator("notIf", { validateString : function(value, requirement, parsleyInstance) { if (!!value){ diff --git a/portality/static/vendor/swagger-ui b/portality/static/vendor/swagger-ui deleted file mode 160000 index d75505fb73..0000000000 --- a/portality/static/vendor/swagger-ui +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d75505fb73a6ac4ecbeabc47bd5a42b0f4d3f270 diff --git a/portality/static/vendor/swagger-ui-5.11.6/installation.md b/portality/static/vendor/swagger-ui-5.11.6/installation.md new file mode 100644 index 0000000000..5b34bc13b9 --- /dev/null +++ b/portality/static/vendor/swagger-ui-5.11.6/installation.md @@ -0,0 +1,190 @@ +# Installation + +## Distribution channels + +### NPM Registry + +We publish three modules to npm: **`swagger-ui`**, **`swagger-ui-dist`** and **`swagger-ui-react`**. + +**`swagger-ui`** is meant for consumption by JavaScript web projects that include module bundlers, +such as Webpack, Browserify, and Rollup. Its main file exports Swagger UI's main function, +and the module also includes a namespaced stylesheet at `swagger-ui/dist/swagger-ui.css`. Here's an example: + +```javascript +import SwaggerUI from 'swagger-ui' +// or use require if you prefer +const SwaggerUI = require('swagger-ui') + +SwaggerUI({ + dom_id: '#myDomId' +}) +``` + +See the [Webpack Getting Started](../samples/webpack-getting-started) sample for details. + +In contrast, **`swagger-ui-dist`** is meant for server-side projects that need assets to serve to clients. The module, when imported, includes an `absolutePath` helper function that returns the absolute filesystem path to where the `swagger-ui-dist` module is installed. + +_Note: we suggest using `swagger-ui` when your tooling makes it possible, as `swagger-ui-dist` +will result in more code going across the wire._ + +The module's contents mirror the `dist` folder you see in the Git repository. The most useful file is `swagger-ui-bundle.js`, which is a build of Swagger UI that includes all the code it needs to run in one file. The folder also has an `index.html` asset, to make it easy to serve Swagger UI like so: + +```javascript +const express = require('express') +const pathToSwaggerUi = require('swagger-ui-dist').absolutePath() + +const app = express() + +app.use(express.static(pathToSwaggerUi)) + +app.listen(3000) +``` + +The module also exports `SwaggerUIBundle` and `SwaggerUIStandalonePreset`, so +if you're in a JavaScript project that can't handle a traditional npm module, +you could do something like this: + +```js +var SwaggerUIBundle = require('swagger-ui-dist').SwaggerUIBundle + +const ui = SwaggerUIBundle({ + url: "https://petstore.swagger.io/v2/swagger.json", + dom_id: '#swagger-ui', + presets: [ + SwaggerUIBundle.presets.apis, + SwaggerUIBundle.SwaggerUIStandalonePreset + ], + layout: "StandaloneLayout" + }) +``` + +`SwaggerUIBundle` is equivalent to `SwaggerUI`. + +### Docker + +You can pull a pre-built docker image of the swagger-ui directly from Docker Hub: + +```sh +docker pull swaggerapi/swagger-ui +docker run -p 80:8080 swaggerapi/swagger-ui +``` + +Will start nginx with Swagger UI on port 80. 
+ +Or you can provide your own swagger.json on your host + +```sh +docker run -p 80:8080 -e SWAGGER_JSON=/foo/swagger.json -v /bar:/foo swaggerapi/swagger-ui +``` + +You can also provide a URL to a swagger.json on an external host: + +```sh +docker run -p 80:8080 -e SWAGGER_JSON_URL=https://petstore3.swagger.io/api/v3/openapi.json swaggerapi/swagger-ui +``` + +The base URL of the web application can be changed by specifying the `BASE_URL` environment variable: + +```sh +docker run -p 80:8080 -e BASE_URL=/swagger -e SWAGGER_JSON=/foo/swagger.json -v /bar:/foo swaggerapi/swagger-ui +``` + +This will serve Swagger UI at `/swagger` instead of `/`. + +You can specify a different port via `PORT` variable for accessing the application, default is `8080`. + +```sh +docker run -p 80:80 -e PORT=80 swaggerapi/swagger-ui +``` + +You can specify an IPv6 port via `PORT_IPV6` variable. By default, IPv6 port is not set. + +```sh +docker run -p 80:80 -e PORT_IPV6=8080 swaggerapi/swagger-ui +``` + +You can allow/disallow [embedding](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Frame-Options) via `EMBEDDING` variable. By default, embedding is disabled. + +```sh +docker run -p 80:80 -e EMBEDDING=true swaggerapi/swagger-ui +``` + +For more information on controlling Swagger UI through the Docker image, see the Docker section of the [Configuration documentation](configuration.md#docker). + +### unpkg + +You can embed Swagger UI's code directly in your HTML by using [unpkg's](https://unpkg.com/) interface: + +```html + + + + + + + SwaggerUI + + + +
+ + + + +``` + +Using `StandalonePreset` will render `TopBar` and `ValidatorBadge` as well. + +```html + + + + + + + SwaggerUI + + + +
+ + + + + +``` + +See [unpkg's main page](https://unpkg.com/) for more information on how to use unpkg. + +### Static files without HTTP or HTML + +Once swagger-ui has successfully generated the `/dist` directory, you can copy this to your own file system and host from there. + +## Plain old HTML/CSS/JS (Standalone) + +The folder `/dist` includes all the HTML, CSS and JS files needed to run SwaggerUI on a static website or CMS, without requiring NPM. + +1. Download the [latest release](https://github.com/swagger-api/swagger-ui/releases/latest). +1. Copy the contents of the `/dist` folder to your server. +1. Open `swagger-initializer.js` in your text editor and replace "https://petstore.swagger.io/v2/swagger.json" with the URL for your OpenAPI 3.0 spec. + + diff --git a/portality/static/vendor/swagger-ui-5.11.6/swagger-ui-bundle.js b/portality/static/vendor/swagger-ui-5.11.6/swagger-ui-bundle.js new file mode 100644 index 0000000000..3b1d60ac72 --- /dev/null +++ b/portality/static/vendor/swagger-ui-5.11.6/swagger-ui-bundle.js @@ -0,0 +1,3 @@ +/*! For license information please see swagger-ui-bundle.js.LICENSE.txt */ +!function webpackUniversalModuleDefinition(s,i){"object"==typeof exports&&"object"==typeof module?module.exports=i():"function"==typeof define&&define.amd?define([],i):"object"==typeof exports?exports.SwaggerUIBundle=i():s.SwaggerUIBundle=i()}(this,(()=>(()=>{var s,i,u={57468:(s,i)=>{"use strict";Object.defineProperty(i,"__esModule",{value:!0}),i.BLANK_URL=i.relativeFirstCharacters=i.urlSchemeRegex=i.ctrlCharactersRegex=i.htmlCtrlEntityRegex=i.htmlEntitiesRegex=i.invalidProtocolRegex=void 0,i.invalidProtocolRegex=/^([^\w]*)(javascript|data|vbscript)/im,i.htmlEntitiesRegex=/&#(\w+)(^\w|;)?/g,i.htmlCtrlEntityRegex=/&(newline|tab);/gi,i.ctrlCharactersRegex=/[\u0000-\u001F\u007F-\u009F\u2000-\u200D\uFEFF]/gim,i.urlSchemeRegex=/^.+(:|:)/gim,i.relativeFirstCharacters=[".","/"],i.BLANK_URL="about:blank"},86208:(s,i,u)=>{"use strict";i.o=void 0;var _=u(57468);i.o=function sanitizeUrl(s){if(!s)return _.BLANK_URL;var i=function decodeHtmlCharacters(s){return s.replace(_.ctrlCharactersRegex,"").replace(_.htmlEntitiesRegex,(function(s,i){return String.fromCharCode(i)}))}(s).replace(_.htmlCtrlEntityRegex,"").replace(_.ctrlCharactersRegex,"").trim();if(!i)return _.BLANK_URL;if(function isRelativeUrlWithoutProtocol(s){return _.relativeFirstCharacters.indexOf(s[0])>-1}(i))return i;var u=i.match(_.urlSchemeRegex);if(!u)return i;var w=u[0];return _.invalidProtocolRegex.test(w)?_.BLANK_URL:i}},25704:(s,i)=>{"use strict";i.byteLength=function byteLength(s){var i=getLens(s),u=i[0],_=i[1];return 3*(u+_)/4-_},i.toByteArray=function toByteArray(s){var i,u,x=getLens(s),j=x[0],P=x[1],B=new w(function _byteLength(s,i,u){return 3*(i+u)/4-u}(0,j,P)),$=0,U=P>0?j-4:j;for(u=0;u>16&255,B[$++]=i>>8&255,B[$++]=255&i;2===P&&(i=_[s.charCodeAt(u)]<<2|_[s.charCodeAt(u+1)]>>4,B[$++]=255&i);1===P&&(i=_[s.charCodeAt(u)]<<10|_[s.charCodeAt(u+1)]<<4|_[s.charCodeAt(u+2)]>>2,B[$++]=i>>8&255,B[$++]=255&i);return B},i.fromByteArray=function fromByteArray(s){for(var i,_=s.length,w=_%3,x=[],j=16383,P=0,B=_-w;PB?B:P+j));1===w?(i=s[_-1],x.push(u[i>>2]+u[i<<4&63]+"==")):2===w&&(i=(s[_-2]<<8)+s[_-1],x.push(u[i>>10]+u[i>>4&63]+u[i<<2&63]+"="));return x.join("")};for(var u=[],_=[],w="undefined"!=typeof Uint8Array?Uint8Array:Array,x="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",j=0;j<64;++j)u[j]=x[j],_[x.charCodeAt(j)]=j;function getLens(s){var i=s.length;if(i%4>0)throw new Error("Invalid string. 
Length must be a multiple of 4");var u=s.indexOf("=");return-1===u&&(u=i),[u,u===i?0:4-u%4]}function encodeChunk(s,i,_){for(var w,x,j=[],P=i;P<_;P+=3)w=(s[P]<<16&16711680)+(s[P+1]<<8&65280)+(255&s[P+2]),j.push(u[(x=w)>>18&63]+u[x>>12&63]+u[x>>6&63]+u[63&x]);return j.join("")}_["-".charCodeAt(0)]=62,_["_".charCodeAt(0)]=63},33296:(s,i,u)=>{"use strict";const _=u(25704),w=u(24404),x="function"==typeof Symbol&&"function"==typeof Symbol.for?Symbol.for("nodejs.util.inspect.custom"):null;i.Buffer=Buffer,i.SlowBuffer=function SlowBuffer(s){+s!=s&&(s=0);return Buffer.alloc(+s)},i.INSPECT_MAX_BYTES=50;const j=2147483647;function createBuffer(s){if(s>j)throw new RangeError('The value "'+s+'" is invalid for option "size"');const i=new Uint8Array(s);return Object.setPrototypeOf(i,Buffer.prototype),i}function Buffer(s,i,u){if("number"==typeof s){if("string"==typeof i)throw new TypeError('The "string" argument must be of type string. Received type number');return allocUnsafe(s)}return from(s,i,u)}function from(s,i,u){if("string"==typeof s)return function fromString(s,i){"string"==typeof i&&""!==i||(i="utf8");if(!Buffer.isEncoding(i))throw new TypeError("Unknown encoding: "+i);const u=0|byteLength(s,i);let _=createBuffer(u);const w=_.write(s,i);w!==u&&(_=_.slice(0,w));return _}(s,i);if(ArrayBuffer.isView(s))return function fromArrayView(s){if(isInstance(s,Uint8Array)){const i=new Uint8Array(s);return fromArrayBuffer(i.buffer,i.byteOffset,i.byteLength)}return fromArrayLike(s)}(s);if(null==s)throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof s);if(isInstance(s,ArrayBuffer)||s&&isInstance(s.buffer,ArrayBuffer))return fromArrayBuffer(s,i,u);if("undefined"!=typeof SharedArrayBuffer&&(isInstance(s,SharedArrayBuffer)||s&&isInstance(s.buffer,SharedArrayBuffer)))return fromArrayBuffer(s,i,u);if("number"==typeof s)throw new TypeError('The "value" argument must not be of type number. Received type number');const _=s.valueOf&&s.valueOf();if(null!=_&&_!==s)return Buffer.from(_,i,u);const w=function fromObject(s){if(Buffer.isBuffer(s)){const i=0|checked(s.length),u=createBuffer(i);return 0===u.length||s.copy(u,0,0,i),u}if(void 0!==s.length)return"number"!=typeof s.length||numberIsNaN(s.length)?createBuffer(0):fromArrayLike(s);if("Buffer"===s.type&&Array.isArray(s.data))return fromArrayLike(s.data)}(s);if(w)return w;if("undefined"!=typeof Symbol&&null!=Symbol.toPrimitive&&"function"==typeof s[Symbol.toPrimitive])return Buffer.from(s[Symbol.toPrimitive]("string"),i,u);throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof s)}function assertSize(s){if("number"!=typeof s)throw new TypeError('"size" argument must be of type number');if(s<0)throw new RangeError('The value "'+s+'" is invalid for option "size"')}function allocUnsafe(s){return assertSize(s),createBuffer(s<0?0:0|checked(s))}function fromArrayLike(s){const i=s.length<0?0:0|checked(s.length),u=createBuffer(i);for(let _=0;_=j)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+j.toString(16)+" bytes");return 0|s}function byteLength(s,i){if(Buffer.isBuffer(s))return s.length;if(ArrayBuffer.isView(s)||isInstance(s,ArrayBuffer))return s.byteLength;if("string"!=typeof s)throw new TypeError('The "string" argument must be one of type string, Buffer, or ArrayBuffer. 
Received type '+typeof s);const u=s.length,_=arguments.length>2&&!0===arguments[2];if(!_&&0===u)return 0;let w=!1;for(;;)switch(i){case"ascii":case"latin1":case"binary":return u;case"utf8":case"utf-8":return utf8ToBytes(s).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*u;case"hex":return u>>>1;case"base64":return base64ToBytes(s).length;default:if(w)return _?-1:utf8ToBytes(s).length;i=(""+i).toLowerCase(),w=!0}}function slowToString(s,i,u){let _=!1;if((void 0===i||i<0)&&(i=0),i>this.length)return"";if((void 0===u||u>this.length)&&(u=this.length),u<=0)return"";if((u>>>=0)<=(i>>>=0))return"";for(s||(s="utf8");;)switch(s){case"hex":return hexSlice(this,i,u);case"utf8":case"utf-8":return utf8Slice(this,i,u);case"ascii":return asciiSlice(this,i,u);case"latin1":case"binary":return latin1Slice(this,i,u);case"base64":return base64Slice(this,i,u);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return utf16leSlice(this,i,u);default:if(_)throw new TypeError("Unknown encoding: "+s);s=(s+"").toLowerCase(),_=!0}}function swap(s,i,u){const _=s[i];s[i]=s[u],s[u]=_}function bidirectionalIndexOf(s,i,u,_,w){if(0===s.length)return-1;if("string"==typeof u?(_=u,u=0):u>2147483647?u=2147483647:u<-2147483648&&(u=-2147483648),numberIsNaN(u=+u)&&(u=w?0:s.length-1),u<0&&(u=s.length+u),u>=s.length){if(w)return-1;u=s.length-1}else if(u<0){if(!w)return-1;u=0}if("string"==typeof i&&(i=Buffer.from(i,_)),Buffer.isBuffer(i))return 0===i.length?-1:arrayIndexOf(s,i,u,_,w);if("number"==typeof i)return i&=255,"function"==typeof Uint8Array.prototype.indexOf?w?Uint8Array.prototype.indexOf.call(s,i,u):Uint8Array.prototype.lastIndexOf.call(s,i,u):arrayIndexOf(s,[i],u,_,w);throw new TypeError("val must be string, number or Buffer")}function arrayIndexOf(s,i,u,_,w){let x,j=1,P=s.length,B=i.length;if(void 0!==_&&("ucs2"===(_=String(_).toLowerCase())||"ucs-2"===_||"utf16le"===_||"utf-16le"===_)){if(s.length<2||i.length<2)return-1;j=2,P/=2,B/=2,u/=2}function read(s,i){return 1===j?s[i]:s.readUInt16BE(i*j)}if(w){let _=-1;for(x=u;xP&&(u=P-B),x=u;x>=0;x--){let u=!0;for(let _=0;_w&&(_=w):_=w;const x=i.length;let j;for(_>x/2&&(_=x/2),j=0;j<_;++j){const _=parseInt(i.substr(2*j,2),16);if(numberIsNaN(_))return j;s[u+j]=_}return j}function utf8Write(s,i,u,_){return blitBuffer(utf8ToBytes(i,s.length-u),s,u,_)}function asciiWrite(s,i,u,_){return blitBuffer(function asciiToBytes(s){const i=[];for(let u=0;u>8,w=u%256,x.push(w),x.push(_);return x}(i,s.length-u),s,u,_)}function base64Slice(s,i,u){return 0===i&&u===s.length?_.fromByteArray(s):_.fromByteArray(s.slice(i,u))}function utf8Slice(s,i,u){u=Math.min(s.length,u);const _=[];let w=i;for(;w239?4:i>223?3:i>191?2:1;if(w+j<=u){let u,_,P,B;switch(j){case 1:i<128&&(x=i);break;case 2:u=s[w+1],128==(192&u)&&(B=(31&i)<<6|63&u,B>127&&(x=B));break;case 3:u=s[w+1],_=s[w+2],128==(192&u)&&128==(192&_)&&(B=(15&i)<<12|(63&u)<<6|63&_,B>2047&&(B<55296||B>57343)&&(x=B));break;case 4:u=s[w+1],_=s[w+2],P=s[w+3],128==(192&u)&&128==(192&_)&&128==(192&P)&&(B=(15&i)<<18|(63&u)<<12|(63&_)<<6|63&P,B>65535&&B<1114112&&(x=B))}}null===x?(x=65533,j=1):x>65535&&(x-=65536,_.push(x>>>10&1023|55296),x=56320|1023&x),_.push(x),w+=j}return function decodeCodePointsArray(s){const i=s.length;if(i<=P)return String.fromCharCode.apply(String,s);let u="",_=0;for(;__.length?(Buffer.isBuffer(i)||(i=Buffer.from(i)),i.copy(_,w)):Uint8Array.prototype.set.call(_,i,w);else{if(!Buffer.isBuffer(i))throw new TypeError('"list" argument must be an Array of Buffers');i.copy(_,w)}w+=i.length}return 
_},Buffer.byteLength=byteLength,Buffer.prototype._isBuffer=!0,Buffer.prototype.swap16=function swap16(){const s=this.length;if(s%2!=0)throw new RangeError("Buffer size must be a multiple of 16-bits");for(let i=0;iu&&(s+=" ... "),""},x&&(Buffer.prototype[x]=Buffer.prototype.inspect),Buffer.prototype.compare=function compare(s,i,u,_,w){if(isInstance(s,Uint8Array)&&(s=Buffer.from(s,s.offset,s.byteLength)),!Buffer.isBuffer(s))throw new TypeError('The "target" argument must be one of type Buffer or Uint8Array. Received type '+typeof s);if(void 0===i&&(i=0),void 0===u&&(u=s?s.length:0),void 0===_&&(_=0),void 0===w&&(w=this.length),i<0||u>s.length||_<0||w>this.length)throw new RangeError("out of range index");if(_>=w&&i>=u)return 0;if(_>=w)return-1;if(i>=u)return 1;if(this===s)return 0;let x=(w>>>=0)-(_>>>=0),j=(u>>>=0)-(i>>>=0);const P=Math.min(x,j),B=this.slice(_,w),$=s.slice(i,u);for(let s=0;s>>=0,isFinite(u)?(u>>>=0,void 0===_&&(_="utf8")):(_=u,u=void 0)}const w=this.length-i;if((void 0===u||u>w)&&(u=w),s.length>0&&(u<0||i<0)||i>this.length)throw new RangeError("Attempt to write outside buffer bounds");_||(_="utf8");let x=!1;for(;;)switch(_){case"hex":return hexWrite(this,s,i,u);case"utf8":case"utf-8":return utf8Write(this,s,i,u);case"ascii":case"latin1":case"binary":return asciiWrite(this,s,i,u);case"base64":return base64Write(this,s,i,u);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return ucs2Write(this,s,i,u);default:if(x)throw new TypeError("Unknown encoding: "+_);_=(""+_).toLowerCase(),x=!0}},Buffer.prototype.toJSON=function toJSON(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};const P=4096;function asciiSlice(s,i,u){let _="";u=Math.min(s.length,u);for(let w=i;w_)&&(u=_);let w="";for(let _=i;_u)throw new RangeError("Trying to access beyond buffer length")}function checkInt(s,i,u,_,w,x){if(!Buffer.isBuffer(s))throw new TypeError('"buffer" argument must be a Buffer instance');if(i>w||is.length)throw new RangeError("Index out of range")}function wrtBigUInt64LE(s,i,u,_,w){checkIntBI(i,_,w,s,u,7);let x=Number(i&BigInt(4294967295));s[u++]=x,x>>=8,s[u++]=x,x>>=8,s[u++]=x,x>>=8,s[u++]=x;let j=Number(i>>BigInt(32)&BigInt(4294967295));return s[u++]=j,j>>=8,s[u++]=j,j>>=8,s[u++]=j,j>>=8,s[u++]=j,u}function wrtBigUInt64BE(s,i,u,_,w){checkIntBI(i,_,w,s,u,7);let x=Number(i&BigInt(4294967295));s[u+7]=x,x>>=8,s[u+6]=x,x>>=8,s[u+5]=x,x>>=8,s[u+4]=x;let j=Number(i>>BigInt(32)&BigInt(4294967295));return s[u+3]=j,j>>=8,s[u+2]=j,j>>=8,s[u+1]=j,j>>=8,s[u]=j,u+8}function checkIEEE754(s,i,u,_,w,x){if(u+_>s.length)throw new RangeError("Index out of range");if(u<0)throw new RangeError("Index out of range")}function writeFloat(s,i,u,_,x){return i=+i,u>>>=0,x||checkIEEE754(s,0,u,4),w.write(s,i,u,_,23,4),u+4}function writeDouble(s,i,u,_,x){return i=+i,u>>>=0,x||checkIEEE754(s,0,u,8),w.write(s,i,u,_,52,8),u+8}Buffer.prototype.slice=function slice(s,i){const u=this.length;(s=~~s)<0?(s+=u)<0&&(s=0):s>u&&(s=u),(i=void 0===i?u:~~i)<0?(i+=u)<0&&(i=0):i>u&&(i=u),i>>=0,i>>>=0,u||checkOffset(s,i,this.length);let _=this[s],w=1,x=0;for(;++x>>=0,i>>>=0,u||checkOffset(s,i,this.length);let _=this[s+--i],w=1;for(;i>0&&(w*=256);)_+=this[s+--i]*w;return _},Buffer.prototype.readUint8=Buffer.prototype.readUInt8=function readUInt8(s,i){return s>>>=0,i||checkOffset(s,1,this.length),this[s]},Buffer.prototype.readUint16LE=Buffer.prototype.readUInt16LE=function readUInt16LE(s,i){return 
s>>>=0,i||checkOffset(s,2,this.length),this[s]|this[s+1]<<8},Buffer.prototype.readUint16BE=Buffer.prototype.readUInt16BE=function readUInt16BE(s,i){return s>>>=0,i||checkOffset(s,2,this.length),this[s]<<8|this[s+1]},Buffer.prototype.readUint32LE=Buffer.prototype.readUInt32LE=function readUInt32LE(s,i){return s>>>=0,i||checkOffset(s,4,this.length),(this[s]|this[s+1]<<8|this[s+2]<<16)+16777216*this[s+3]},Buffer.prototype.readUint32BE=Buffer.prototype.readUInt32BE=function readUInt32BE(s,i){return s>>>=0,i||checkOffset(s,4,this.length),16777216*this[s]+(this[s+1]<<16|this[s+2]<<8|this[s+3])},Buffer.prototype.readBigUInt64LE=defineBigIntMethod((function readBigUInt64LE(s){validateNumber(s>>>=0,"offset");const i=this[s],u=this[s+7];void 0!==i&&void 0!==u||boundsError(s,this.length-8);const _=i+256*this[++s]+65536*this[++s]+this[++s]*2**24,w=this[++s]+256*this[++s]+65536*this[++s]+u*2**24;return BigInt(_)+(BigInt(w)<>>=0,"offset");const i=this[s],u=this[s+7];void 0!==i&&void 0!==u||boundsError(s,this.length-8);const _=i*2**24+65536*this[++s]+256*this[++s]+this[++s],w=this[++s]*2**24+65536*this[++s]+256*this[++s]+u;return(BigInt(_)<>>=0,i>>>=0,u||checkOffset(s,i,this.length);let _=this[s],w=1,x=0;for(;++x=w&&(_-=Math.pow(2,8*i)),_},Buffer.prototype.readIntBE=function readIntBE(s,i,u){s>>>=0,i>>>=0,u||checkOffset(s,i,this.length);let _=i,w=1,x=this[s+--_];for(;_>0&&(w*=256);)x+=this[s+--_]*w;return w*=128,x>=w&&(x-=Math.pow(2,8*i)),x},Buffer.prototype.readInt8=function readInt8(s,i){return s>>>=0,i||checkOffset(s,1,this.length),128&this[s]?-1*(255-this[s]+1):this[s]},Buffer.prototype.readInt16LE=function readInt16LE(s,i){s>>>=0,i||checkOffset(s,2,this.length);const u=this[s]|this[s+1]<<8;return 32768&u?4294901760|u:u},Buffer.prototype.readInt16BE=function readInt16BE(s,i){s>>>=0,i||checkOffset(s,2,this.length);const u=this[s+1]|this[s]<<8;return 32768&u?4294901760|u:u},Buffer.prototype.readInt32LE=function readInt32LE(s,i){return s>>>=0,i||checkOffset(s,4,this.length),this[s]|this[s+1]<<8|this[s+2]<<16|this[s+3]<<24},Buffer.prototype.readInt32BE=function readInt32BE(s,i){return s>>>=0,i||checkOffset(s,4,this.length),this[s]<<24|this[s+1]<<16|this[s+2]<<8|this[s+3]},Buffer.prototype.readBigInt64LE=defineBigIntMethod((function readBigInt64LE(s){validateNumber(s>>>=0,"offset");const i=this[s],u=this[s+7];void 0!==i&&void 0!==u||boundsError(s,this.length-8);const _=this[s+4]+256*this[s+5]+65536*this[s+6]+(u<<24);return(BigInt(_)<>>=0,"offset");const i=this[s],u=this[s+7];void 0!==i&&void 0!==u||boundsError(s,this.length-8);const _=(i<<24)+65536*this[++s]+256*this[++s]+this[++s];return(BigInt(_)<>>=0,i||checkOffset(s,4,this.length),w.read(this,s,!0,23,4)},Buffer.prototype.readFloatBE=function readFloatBE(s,i){return s>>>=0,i||checkOffset(s,4,this.length),w.read(this,s,!1,23,4)},Buffer.prototype.readDoubleLE=function readDoubleLE(s,i){return s>>>=0,i||checkOffset(s,8,this.length),w.read(this,s,!0,52,8)},Buffer.prototype.readDoubleBE=function readDoubleBE(s,i){return s>>>=0,i||checkOffset(s,8,this.length),w.read(this,s,!1,52,8)},Buffer.prototype.writeUintLE=Buffer.prototype.writeUIntLE=function writeUIntLE(s,i,u,_){if(s=+s,i>>>=0,u>>>=0,!_){checkInt(this,s,i,u,Math.pow(2,8*u)-1,0)}let w=1,x=0;for(this[i]=255&s;++x>>=0,u>>>=0,!_){checkInt(this,s,i,u,Math.pow(2,8*u)-1,0)}let w=u-1,x=1;for(this[i+w]=255&s;--w>=0&&(x*=256);)this[i+w]=s/x&255;return i+u},Buffer.prototype.writeUint8=Buffer.prototype.writeUInt8=function writeUInt8(s,i,u){return 
s=+s,i>>>=0,u||checkInt(this,s,i,1,255,0),this[i]=255&s,i+1},Buffer.prototype.writeUint16LE=Buffer.prototype.writeUInt16LE=function writeUInt16LE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,2,65535,0),this[i]=255&s,this[i+1]=s>>>8,i+2},Buffer.prototype.writeUint16BE=Buffer.prototype.writeUInt16BE=function writeUInt16BE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,2,65535,0),this[i]=s>>>8,this[i+1]=255&s,i+2},Buffer.prototype.writeUint32LE=Buffer.prototype.writeUInt32LE=function writeUInt32LE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,4,4294967295,0),this[i+3]=s>>>24,this[i+2]=s>>>16,this[i+1]=s>>>8,this[i]=255&s,i+4},Buffer.prototype.writeUint32BE=Buffer.prototype.writeUInt32BE=function writeUInt32BE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,4,4294967295,0),this[i]=s>>>24,this[i+1]=s>>>16,this[i+2]=s>>>8,this[i+3]=255&s,i+4},Buffer.prototype.writeBigUInt64LE=defineBigIntMethod((function writeBigUInt64LE(s,i=0){return wrtBigUInt64LE(this,s,i,BigInt(0),BigInt("0xffffffffffffffff"))})),Buffer.prototype.writeBigUInt64BE=defineBigIntMethod((function writeBigUInt64BE(s,i=0){return wrtBigUInt64BE(this,s,i,BigInt(0),BigInt("0xffffffffffffffff"))})),Buffer.prototype.writeIntLE=function writeIntLE(s,i,u,_){if(s=+s,i>>>=0,!_){const _=Math.pow(2,8*u-1);checkInt(this,s,i,u,_-1,-_)}let w=0,x=1,j=0;for(this[i]=255&s;++w>0)-j&255;return i+u},Buffer.prototype.writeIntBE=function writeIntBE(s,i,u,_){if(s=+s,i>>>=0,!_){const _=Math.pow(2,8*u-1);checkInt(this,s,i,u,_-1,-_)}let w=u-1,x=1,j=0;for(this[i+w]=255&s;--w>=0&&(x*=256);)s<0&&0===j&&0!==this[i+w+1]&&(j=1),this[i+w]=(s/x>>0)-j&255;return i+u},Buffer.prototype.writeInt8=function writeInt8(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,1,127,-128),s<0&&(s=255+s+1),this[i]=255&s,i+1},Buffer.prototype.writeInt16LE=function writeInt16LE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,2,32767,-32768),this[i]=255&s,this[i+1]=s>>>8,i+2},Buffer.prototype.writeInt16BE=function writeInt16BE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,2,32767,-32768),this[i]=s>>>8,this[i+1]=255&s,i+2},Buffer.prototype.writeInt32LE=function writeInt32LE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,4,2147483647,-2147483648),this[i]=255&s,this[i+1]=s>>>8,this[i+2]=s>>>16,this[i+3]=s>>>24,i+4},Buffer.prototype.writeInt32BE=function writeInt32BE(s,i,u){return s=+s,i>>>=0,u||checkInt(this,s,i,4,2147483647,-2147483648),s<0&&(s=4294967295+s+1),this[i]=s>>>24,this[i+1]=s>>>16,this[i+2]=s>>>8,this[i+3]=255&s,i+4},Buffer.prototype.writeBigInt64LE=defineBigIntMethod((function writeBigInt64LE(s,i=0){return wrtBigUInt64LE(this,s,i,-BigInt("0x8000000000000000"),BigInt("0x7fffffffffffffff"))})),Buffer.prototype.writeBigInt64BE=defineBigIntMethod((function writeBigInt64BE(s,i=0){return wrtBigUInt64BE(this,s,i,-BigInt("0x8000000000000000"),BigInt("0x7fffffffffffffff"))})),Buffer.prototype.writeFloatLE=function writeFloatLE(s,i,u){return writeFloat(this,s,i,!0,u)},Buffer.prototype.writeFloatBE=function writeFloatBE(s,i,u){return writeFloat(this,s,i,!1,u)},Buffer.prototype.writeDoubleLE=function writeDoubleLE(s,i,u){return writeDouble(this,s,i,!0,u)},Buffer.prototype.writeDoubleBE=function writeDoubleBE(s,i,u){return writeDouble(this,s,i,!1,u)},Buffer.prototype.copy=function copy(s,i,u,_){if(!Buffer.isBuffer(s))throw new TypeError("argument should be a Buffer");if(u||(u=0),_||0===_||(_=this.length),i>=s.length&&(i=s.length),i||(i=0),_>0&&_=this.length)throw new RangeError("Index out of range");if(_<0)throw new RangeError("sourceEnd out of 
bounds");_>this.length&&(_=this.length),s.length-i<_-u&&(_=s.length-i+u);const w=_-u;return this===s&&"function"==typeof Uint8Array.prototype.copyWithin?this.copyWithin(i,u,_):Uint8Array.prototype.set.call(s,this.subarray(u,_),i),w},Buffer.prototype.fill=function fill(s,i,u,_){if("string"==typeof s){if("string"==typeof i?(_=i,i=0,u=this.length):"string"==typeof u&&(_=u,u=this.length),void 0!==_&&"string"!=typeof _)throw new TypeError("encoding must be a string");if("string"==typeof _&&!Buffer.isEncoding(_))throw new TypeError("Unknown encoding: "+_);if(1===s.length){const i=s.charCodeAt(0);("utf8"===_&&i<128||"latin1"===_)&&(s=i)}}else"number"==typeof s?s&=255:"boolean"==typeof s&&(s=Number(s));if(i<0||this.length>>=0,u=void 0===u?this.length:u>>>0,s||(s=0),"number"==typeof s)for(w=i;w=_+4;u-=3)i=`_${s.slice(u-3,u)}${i}`;return`${s.slice(0,u)}${i}`}function checkIntBI(s,i,u,_,w,x){if(s>u||s3?0===i||i===BigInt(0)?`>= 0${_} and < 2${_} ** ${8*(x+1)}${_}`:`>= -(2${_} ** ${8*(x+1)-1}${_}) and < 2 ** ${8*(x+1)-1}${_}`:`>= ${i}${_} and <= ${u}${_}`,new B.ERR_OUT_OF_RANGE("value",w,s)}!function checkBounds(s,i,u){validateNumber(i,"offset"),void 0!==s[i]&&void 0!==s[i+u]||boundsError(i,s.length-(u+1))}(_,w,x)}function validateNumber(s,i){if("number"!=typeof s)throw new B.ERR_INVALID_ARG_TYPE(i,"number",s)}function boundsError(s,i,u){if(Math.floor(s)!==s)throw validateNumber(s,u),new B.ERR_OUT_OF_RANGE(u||"offset","an integer",s);if(i<0)throw new B.ERR_BUFFER_OUT_OF_BOUNDS;throw new B.ERR_OUT_OF_RANGE(u||"offset",`>= ${u?1:0} and <= ${i}`,s)}E("ERR_BUFFER_OUT_OF_BOUNDS",(function(s){return s?`${s} is outside of buffer bounds`:"Attempt to access memory outside buffer bounds"}),RangeError),E("ERR_INVALID_ARG_TYPE",(function(s,i){return`The "${s}" argument must be of type number. Received type ${typeof i}`}),TypeError),E("ERR_OUT_OF_RANGE",(function(s,i,u){let _=`The value of "${s}" is out of range.`,w=u;return Number.isInteger(u)&&Math.abs(u)>2**32?w=addNumericalSeparator(String(u)):"bigint"==typeof u&&(w=String(u),(u>BigInt(2)**BigInt(32)||u<-(BigInt(2)**BigInt(32)))&&(w=addNumericalSeparator(w)),w+="n"),_+=` It must be ${i}. 
Received ${w}`,_}),RangeError);const $=/[^+/0-9A-Za-z-_]/g;function utf8ToBytes(s,i){let u;i=i||1/0;const _=s.length;let w=null;const x=[];for(let j=0;j<_;++j){if(u=s.charCodeAt(j),u>55295&&u<57344){if(!w){if(u>56319){(i-=3)>-1&&x.push(239,191,189);continue}if(j+1===_){(i-=3)>-1&&x.push(239,191,189);continue}w=u;continue}if(u<56320){(i-=3)>-1&&x.push(239,191,189),w=u;continue}u=65536+(w-55296<<10|u-56320)}else w&&(i-=3)>-1&&x.push(239,191,189);if(w=null,u<128){if((i-=1)<0)break;x.push(u)}else if(u<2048){if((i-=2)<0)break;x.push(u>>6|192,63&u|128)}else if(u<65536){if((i-=3)<0)break;x.push(u>>12|224,u>>6&63|128,63&u|128)}else{if(!(u<1114112))throw new Error("Invalid code point");if((i-=4)<0)break;x.push(u>>18|240,u>>12&63|128,u>>6&63|128,63&u|128)}}return x}function base64ToBytes(s){return _.toByteArray(function base64clean(s){if((s=(s=s.split("=")[0]).trim().replace($,"")).length<2)return"";for(;s.length%4!=0;)s+="=";return s}(s))}function blitBuffer(s,i,u,_){let w;for(w=0;w<_&&!(w+u>=i.length||w>=s.length);++w)i[w+u]=s[w];return w}function isInstance(s,i){return s instanceof i||null!=s&&null!=s.constructor&&null!=s.constructor.name&&s.constructor.name===i.name}function numberIsNaN(s){return s!=s}const U=function(){const s="0123456789abcdef",i=new Array(256);for(let u=0;u<16;++u){const _=16*u;for(let w=0;w<16;++w)i[_+w]=s[u]+s[w]}return i}();function defineBigIntMethod(s){return"undefined"==typeof BigInt?BufferBigIntNotDefined:s}function BufferBigIntNotDefined(){throw new Error("BigInt not supported")}},22768:(s,i,u)=>{"use strict";var _=u(96604),w=u(4572),x=w(_("String.prototype.indexOf"));s.exports=function callBoundIntrinsic(s,i){var u=_(s,!!i);return"function"==typeof u&&x(s,".prototype.")>-1?w(u):u}},4572:(s,i,u)=>{"use strict";var _=u(30268),w=u(96604),x=u(52392),j=w("%TypeError%"),P=w("%Function.prototype.apply%"),B=w("%Function.prototype.call%"),$=w("%Reflect.apply%",!0)||_.call(B,P),U=w("%Object.defineProperty%",!0),Y=w("%Math.max%");if(U)try{U({},"a",{value:1})}catch(s){U=null}s.exports=function callBind(s){if("function"!=typeof s)throw new j("a function is required");var i=$(_,B,arguments);return x(i,1+Y(0,s.length-(arguments.length-1)),!0)};var X=function applyBind(){return $(_,P,arguments)};U?U(s.exports,"apply",{value:X}):s.exports.apply=X},6272:(s,i,u)=>{"use strict";var _=u(20308),w={"text/plain":"Text","text/html":"Url",default:"Text"};s.exports=function copy(s,i){var u,x,j,P,B,$,U=!1;i||(i={}),u=i.debug||!1;try{if(j=_(),P=document.createRange(),B=document.getSelection(),($=document.createElement("span")).textContent=s,$.ariaHidden="true",$.style.all="unset",$.style.position="fixed",$.style.top=0,$.style.clip="rect(0, 0, 0, 0)",$.style.whiteSpace="pre",$.style.webkitUserSelect="text",$.style.MozUserSelect="text",$.style.msUserSelect="text",$.style.userSelect="text",$.addEventListener("copy",(function(_){if(_.stopPropagation(),i.format)if(_.preventDefault(),void 0===_.clipboardData){u&&console.warn("unable to use e.clipboardData"),u&&console.warn("trying IE specific stuff"),window.clipboardData.clearData();var x=w[i.format]||w.default;window.clipboardData.setData(x,s)}else _.clipboardData.clearData(),_.clipboardData.setData(i.format,s);i.onCopy&&(_.preventDefault(),i.onCopy(_.clipboardData))})),document.body.appendChild($),P.selectNodeContents($),B.addRange(P),!document.execCommand("copy"))throw new Error("copy command was unsuccessful");U=!0}catch(_){u&&console.error("unable to copy using execCommand: ",_),u&&console.warn("trying IE specific 
stuff");try{window.clipboardData.setData(i.format||"text",s),i.onCopy&&i.onCopy(window.clipboardData),U=!0}catch(_){u&&console.error("unable to copy using clipboardData: ",_),u&&console.error("falling back to prompt"),x=function format(s){var i=(/mac os x/i.test(navigator.userAgent)?"⌘":"Ctrl")+"+C";return s.replace(/#{\s*key\s*}/g,i)}("message"in i?i.message:"Copy to clipboard: #{key}, Enter"),window.prompt(x,s)}}finally{B&&("function"==typeof B.removeRange?B.removeRange(P):B.removeAllRanges()),$&&document.body.removeChild($),j()}return U}},50432:function(s,i,u){var _;_=void 0!==u.g?u.g:this,s.exports=function(s){if(s.CSS&&s.CSS.escape)return s.CSS.escape;var cssEscape=function(s){if(0==arguments.length)throw new TypeError("`CSS.escape` requires an argument.");for(var i,u=String(s),_=u.length,w=-1,x="",j=u.charCodeAt(0);++w<_;)0!=(i=u.charCodeAt(w))?x+=i>=1&&i<=31||127==i||0==w&&i>=48&&i<=57||1==w&&i>=48&&i<=57&&45==j?"\\"+i.toString(16)+" ":0==w&&1==_&&45==i||!(i>=128||45==i||95==i||i>=48&&i<=57||i>=65&&i<=90||i>=97&&i<=122)?"\\"+u.charAt(w):u.charAt(w):x+="�";return x};return s.CSS||(s.CSS={}),s.CSS.escape=cssEscape,cssEscape}(_)},41796:(s,i,u)=>{"use strict";var _=u(33296).Buffer;function isSpecificValue(s){return s instanceof _||s instanceof Date||s instanceof RegExp}function cloneSpecificValue(s){if(s instanceof _){var i=_.alloc?_.alloc(s.length):new _(s.length);return s.copy(i),i}if(s instanceof Date)return new Date(s.getTime());if(s instanceof RegExp)return new RegExp(s);throw new Error("Unexpected situation")}function deepCloneArray(s){var i=[];return s.forEach((function(s,u){"object"==typeof s&&null!==s?Array.isArray(s)?i[u]=deepCloneArray(s):isSpecificValue(s)?i[u]=cloneSpecificValue(s):i[u]=w({},s):i[u]=s})),i}function safeGetProperty(s,i){return"__proto__"===i?void 0:s[i]}var w=s.exports=function(){if(arguments.length<1||"object"!=typeof arguments[0])return!1;if(arguments.length<2)return arguments[0];var s,i,u=arguments[0];return Array.prototype.slice.call(arguments,1).forEach((function(_){"object"!=typeof _||null===_||Array.isArray(_)||Object.keys(_).forEach((function(x){return i=safeGetProperty(u,x),(s=safeGetProperty(_,x))===u?void 0:"object"!=typeof s||null===s?void(u[x]=s):Array.isArray(s)?void(u[x]=deepCloneArray(s)):isSpecificValue(s)?void(u[x]=cloneSpecificValue(s)):"object"!=typeof i||null===i||Array.isArray(i)?void(u[x]=w({},s)):void(u[x]=w(i,s))}))})),u}},68084:s=>{"use strict";var i=function isMergeableObject(s){return function isNonNullObject(s){return!!s&&"object"==typeof s}(s)&&!function isSpecial(s){var i=Object.prototype.toString.call(s);return"[object RegExp]"===i||"[object Date]"===i||function isReactElement(s){return s.$$typeof===u}(s)}(s)};var u="function"==typeof Symbol&&Symbol.for?Symbol.for("react.element"):60103;function cloneUnlessOtherwiseSpecified(s,i){return!1!==i.clone&&i.isMergeableObject(s)?deepmerge(function emptyTarget(s){return Array.isArray(s)?[]:{}}(s),s,i):s}function defaultArrayMerge(s,i,u){return s.concat(i).map((function(s){return cloneUnlessOtherwiseSpecified(s,u)}))}function getKeys(s){return Object.keys(s).concat(function getEnumerableOwnPropertySymbols(s){return Object.getOwnPropertySymbols?Object.getOwnPropertySymbols(s).filter((function(i){return Object.propertyIsEnumerable.call(s,i)})):[]}(s))}function propertyIsOnObject(s,i){try{return i in s}catch(s){return!1}}function mergeObject(s,i,u){var _={};return 
u.isMergeableObject(s)&&getKeys(s).forEach((function(i){_[i]=cloneUnlessOtherwiseSpecified(s[i],u)})),getKeys(i).forEach((function(w){(function propertyIsUnsafe(s,i){return propertyIsOnObject(s,i)&&!(Object.hasOwnProperty.call(s,i)&&Object.propertyIsEnumerable.call(s,i))})(s,w)||(propertyIsOnObject(s,w)&&u.isMergeableObject(i[w])?_[w]=function getMergeFunction(s,i){if(!i.customMerge)return deepmerge;var u=i.customMerge(s);return"function"==typeof u?u:deepmerge}(w,u)(s[w],i[w],u):_[w]=cloneUnlessOtherwiseSpecified(i[w],u))})),_}function deepmerge(s,u,_){(_=_||{}).arrayMerge=_.arrayMerge||defaultArrayMerge,_.isMergeableObject=_.isMergeableObject||i,_.cloneUnlessOtherwiseSpecified=cloneUnlessOtherwiseSpecified;var w=Array.isArray(u);return w===Array.isArray(s)?w?_.arrayMerge(s,u,_):mergeObject(s,u,_):cloneUnlessOtherwiseSpecified(u,_)}deepmerge.all=function deepmergeAll(s,i){if(!Array.isArray(s))throw new Error("first argument should be an array");return s.reduce((function(s,u){return deepmerge(s,u,i)}),{})};var _=deepmerge;s.exports=_},5456:(s,i,u)=>{"use strict";var _=u(71188)(),w=u(96604),x=_&&w("%Object.defineProperty%",!0);if(x)try{x({},"a",{value:1})}catch(s){x=!1}var j=w("%SyntaxError%"),P=w("%TypeError%"),B=u(80872);s.exports=function defineDataProperty(s,i,u){if(!s||"object"!=typeof s&&"function"!=typeof s)throw new P("`obj` must be an object or a function`");if("string"!=typeof i&&"symbol"!=typeof i)throw new P("`property` must be a string or a symbol`");if(arguments.length>3&&"boolean"!=typeof arguments[3]&&null!==arguments[3])throw new P("`nonEnumerable`, if provided, must be a boolean or null");if(arguments.length>4&&"boolean"!=typeof arguments[4]&&null!==arguments[4])throw new P("`nonWritable`, if provided, must be a boolean or null");if(arguments.length>5&&"boolean"!=typeof arguments[5]&&null!==arguments[5])throw new P("`nonConfigurable`, if provided, must be a boolean or null");if(arguments.length>6&&"boolean"!=typeof arguments[6])throw new P("`loose`, if provided, must be a boolean");var _=arguments.length>3?arguments[3]:null,w=arguments.length>4?arguments[4]:null,$=arguments.length>5?arguments[5]:null,U=arguments.length>6&&arguments[6],Y=!!B&&B(s,i);if(x)x(s,i,{configurable:null===$&&Y?Y.configurable:!$,enumerable:null===_&&Y?Y.enumerable:!_,value:u,writable:null===w&&Y?Y.writable:!w});else{if(!U&&(_||w||$))throw new j("This environment does not support defining a property as non-configurable, non-writable, or non-enumerable.");s[i]=u}}},24300:function(s){s.exports=function(){"use strict";const{entries:s,setPrototypeOf:i,isFrozen:u,getPrototypeOf:_,getOwnPropertyDescriptor:w}=Object;let{freeze:x,seal:j,create:P}=Object,{apply:B,construct:$}="undefined"!=typeof Reflect&&Reflect;x||(x=function freeze(s){return s}),j||(j=function seal(s){return s}),B||(B=function apply(s,i,u){return s.apply(i,u)}),$||($=function construct(s,i){return new s(...i)});const U=unapply(Array.prototype.forEach),Y=unapply(Array.prototype.pop),X=unapply(Array.prototype.push),Z=unapply(String.prototype.toLowerCase),ee=unapply(String.prototype.toString),ae=unapply(String.prototype.match),ie=unapply(String.prototype.replace),le=unapply(String.prototype.indexOf),ce=unapply(String.prototype.trim),pe=unapply(RegExp.prototype.test),de=unconstruct(TypeError);function unapply(s){return function(i){for(var u=arguments.length,_=new Array(u>1?u-1:0),w=1;w2&&void 0!==arguments[2]?arguments[2]:Z;i&&i(s,null);let x=_.length;for(;x--;){let i=_[x];if("string"==typeof i){const 
s=w(i);s!==i&&(u(_)||(_[x]=s),i=s)}s[i]=!0}return s}function cleanArray(s){for(let i=0;i/gm),We=j(/\${[\w\W]*}/gm),He=j(/^data-[\-\w.\u00B7-\uFFFF]/),Ye=j(/^aria-[\-\w]+$/),Qe=j(/^(?:(?:(?:f|ht)tps?|mailto|tel|callto|sms|cid|xmpp):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i),Xe=j(/^(?:\w+script|data):/i),et=j(/[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205F\u3000]/g),tt=j(/^html$/i);var rt=Object.freeze({__proto__:null,MUSTACHE_EXPR:$e,ERB_EXPR:ze,TMPLIT_EXPR:We,DATA_ATTR:He,ARIA_ATTR:Ye,IS_ALLOWED_URI:Qe,IS_SCRIPT_OR_DATA:Xe,ATTR_WHITESPACE:et,DOCTYPE_NAME:tt});const nt=function getGlobal(){return"undefined"==typeof window?null:window},ot=function _createTrustedTypesPolicy(s,i){if("object"!=typeof s||"function"!=typeof s.createPolicy)return null;let u=null;const _="data-tt-policy-suffix";i&&i.hasAttribute(_)&&(u=i.getAttribute(_));const w="dompurify"+(u?"#"+u:"");try{return s.createPolicy(w,{createHTML:s=>s,createScriptURL:s=>s})}catch(s){return console.warn("TrustedTypes policy "+w+" could not be created."),null}};function createDOMPurify(){let i=arguments.length>0&&void 0!==arguments[0]?arguments[0]:nt();const DOMPurify=s=>createDOMPurify(s);if(DOMPurify.version="3.0.8",DOMPurify.removed=[],!i||!i.document||9!==i.document.nodeType)return DOMPurify.isSupported=!1,DOMPurify;let{document:u}=i;const _=u,w=_.currentScript,{DocumentFragment:j,HTMLTemplateElement:B,Node:$,Element:$e,NodeFilter:ze,NamedNodeMap:We=i.NamedNodeMap||i.MozNamedAttrMap,HTMLFormElement:He,DOMParser:Ye,trustedTypes:Xe}=i,et=$e.prototype,at=lookupGetter(et,"cloneNode"),st=lookupGetter(et,"nextSibling"),it=lookupGetter(et,"childNodes"),lt=lookupGetter(et,"parentNode");if("function"==typeof B){const s=u.createElement("template");s.content&&s.content.ownerDocument&&(u=s.content.ownerDocument)}let ct,ut="";const{implementation:pt,createNodeIterator:ht,createDocumentFragment:dt,getElementsByTagName:mt}=u,{importNode:gt}=_;let yt={};DOMPurify.isSupported="function"==typeof s&&"function"==typeof lt&&pt&&void 0!==pt.createHTMLDocument;const{MUSTACHE_EXPR:vt,ERB_EXPR:bt,TMPLIT_EXPR:_t,DATA_ATTR:wt,ARIA_ATTR:Et,IS_SCRIPT_OR_DATA:St,ATTR_WHITESPACE:xt}=rt;let{IS_ALLOWED_URI:kt}=rt,Ot=null;const Ct=addToSet({},[...fe,...ye,...be,...we,...xe]);let At=null;const jt=addToSet({},[...Pe,...Te,...Re,...qe]);let It=Object.seal(P(null,{tagNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},attributeNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},allowCustomizedBuiltInElements:{writable:!0,configurable:!1,enumerable:!0,value:!1}})),Pt=null,Nt=null,Tt=!0,Mt=!0,Rt=!1,Dt=!0,Bt=!1,Lt=!1,Ft=!1,qt=!1,$t=!1,Ut=!1,zt=!1,Wt=!0,Vt=!1;const Kt="user-content-";let Ht=!0,Jt=!1,Gt={},Yt=null;const Qt=addToSet({},["annotation-xml","audio","colgroup","desc","foreignobject","head","iframe","math","mi","mn","mo","ms","mtext","noembed","noframes","noscript","plaintext","script","style","svg","template","thead","title","video","xmp"]);let Xt=null;const Zt=addToSet({},["audio","video","img","source","image","track"]);let er=null;const tr=addToSet({},["alt","class","for","id","label","name","pattern","placeholder","role","summary","title","value","style","xmlns"]),rr="http://www.w3.org/1998/Math/MathML",nr="http://www.w3.org/2000/svg",ar="http://www.w3.org/1999/xhtml";let sr=ar,ir=!1,lr=null;const cr=addToSet({},[rr,nr,ar],ee);let ur=null;const pr=["application/xhtml+xml","text/html"],dr="text/html";let fr=null,mr=null;const gr=u.createElement("form"),yr=function isRegexOrFunction(s){return s instanceof RegExp||s instanceof 
Function},vr=function _parseConfig(){let s=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};if(!mr||mr!==s){if(s&&"object"==typeof s||(s={}),s=clone(s),ur=-1===pr.indexOf(s.PARSER_MEDIA_TYPE)?dr:s.PARSER_MEDIA_TYPE,fr="application/xhtml+xml"===ur?ee:Z,Ot="ALLOWED_TAGS"in s?addToSet({},s.ALLOWED_TAGS,fr):Ct,At="ALLOWED_ATTR"in s?addToSet({},s.ALLOWED_ATTR,fr):jt,lr="ALLOWED_NAMESPACES"in s?addToSet({},s.ALLOWED_NAMESPACES,ee):cr,er="ADD_URI_SAFE_ATTR"in s?addToSet(clone(tr),s.ADD_URI_SAFE_ATTR,fr):tr,Xt="ADD_DATA_URI_TAGS"in s?addToSet(clone(Zt),s.ADD_DATA_URI_TAGS,fr):Zt,Yt="FORBID_CONTENTS"in s?addToSet({},s.FORBID_CONTENTS,fr):Qt,Pt="FORBID_TAGS"in s?addToSet({},s.FORBID_TAGS,fr):{},Nt="FORBID_ATTR"in s?addToSet({},s.FORBID_ATTR,fr):{},Gt="USE_PROFILES"in s&&s.USE_PROFILES,Tt=!1!==s.ALLOW_ARIA_ATTR,Mt=!1!==s.ALLOW_DATA_ATTR,Rt=s.ALLOW_UNKNOWN_PROTOCOLS||!1,Dt=!1!==s.ALLOW_SELF_CLOSE_IN_ATTR,Bt=s.SAFE_FOR_TEMPLATES||!1,Lt=s.WHOLE_DOCUMENT||!1,$t=s.RETURN_DOM||!1,Ut=s.RETURN_DOM_FRAGMENT||!1,zt=s.RETURN_TRUSTED_TYPE||!1,qt=s.FORCE_BODY||!1,Wt=!1!==s.SANITIZE_DOM,Vt=s.SANITIZE_NAMED_PROPS||!1,Ht=!1!==s.KEEP_CONTENT,Jt=s.IN_PLACE||!1,kt=s.ALLOWED_URI_REGEXP||Qe,sr=s.NAMESPACE||ar,It=s.CUSTOM_ELEMENT_HANDLING||{},s.CUSTOM_ELEMENT_HANDLING&&yr(s.CUSTOM_ELEMENT_HANDLING.tagNameCheck)&&(It.tagNameCheck=s.CUSTOM_ELEMENT_HANDLING.tagNameCheck),s.CUSTOM_ELEMENT_HANDLING&&yr(s.CUSTOM_ELEMENT_HANDLING.attributeNameCheck)&&(It.attributeNameCheck=s.CUSTOM_ELEMENT_HANDLING.attributeNameCheck),s.CUSTOM_ELEMENT_HANDLING&&"boolean"==typeof s.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements&&(It.allowCustomizedBuiltInElements=s.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements),Bt&&(Mt=!1),Ut&&($t=!0),Gt&&(Ot=addToSet({},xe),At=[],!0===Gt.html&&(addToSet(Ot,fe),addToSet(At,Pe)),!0===Gt.svg&&(addToSet(Ot,ye),addToSet(At,Te),addToSet(At,qe)),!0===Gt.svgFilters&&(addToSet(Ot,be),addToSet(At,Te),addToSet(At,qe)),!0===Gt.mathMl&&(addToSet(Ot,we),addToSet(At,Re),addToSet(At,qe))),s.ADD_TAGS&&(Ot===Ct&&(Ot=clone(Ot)),addToSet(Ot,s.ADD_TAGS,fr)),s.ADD_ATTR&&(At===jt&&(At=clone(At)),addToSet(At,s.ADD_ATTR,fr)),s.ADD_URI_SAFE_ATTR&&addToSet(er,s.ADD_URI_SAFE_ATTR,fr),s.FORBID_CONTENTS&&(Yt===Qt&&(Yt=clone(Yt)),addToSet(Yt,s.FORBID_CONTENTS,fr)),Ht&&(Ot["#text"]=!0),Lt&&addToSet(Ot,["html","head","body"]),Ot.table&&(addToSet(Ot,["tbody"]),delete Pt.tbody),s.TRUSTED_TYPES_POLICY){if("function"!=typeof s.TRUSTED_TYPES_POLICY.createHTML)throw de('TRUSTED_TYPES_POLICY configuration option must provide a "createHTML" hook.');if("function"!=typeof s.TRUSTED_TYPES_POLICY.createScriptURL)throw de('TRUSTED_TYPES_POLICY configuration option must provide a "createScriptURL" hook.');ct=s.TRUSTED_TYPES_POLICY,ut=ct.createHTML("")}else void 0===ct&&(ct=ot(Xe,w)),null!==ct&&"string"==typeof ut&&(ut=ct.createHTML(""));x&&x(s),mr=s}},br=addToSet({},["mi","mo","mn","ms","mtext"]),_r=addToSet({},["foreignobject","desc","title","annotation-xml"]),wr=addToSet({},["title","style","font","a","script"]),Er=addToSet({},[...ye,...be,..._e]),Sr=addToSet({},[...we,...Se]),xr=function _checkValidNamespace(s){let i=lt(s);i&&i.tagName||(i={namespaceURI:sr,tagName:"template"});const 
u=Z(s.tagName),_=Z(i.tagName);return!!lr[s.namespaceURI]&&(s.namespaceURI===nr?i.namespaceURI===ar?"svg"===u:i.namespaceURI===rr?"svg"===u&&("annotation-xml"===_||br[_]):Boolean(Er[u]):s.namespaceURI===rr?i.namespaceURI===ar?"math"===u:i.namespaceURI===nr?"math"===u&&_r[_]:Boolean(Sr[u]):s.namespaceURI===ar?!(i.namespaceURI===nr&&!_r[_])&&!(i.namespaceURI===rr&&!br[_])&&!Sr[u]&&(wr[u]||!Er[u]):!("application/xhtml+xml"!==ur||!lr[s.namespaceURI]))},kr=function _forceRemove(s){X(DOMPurify.removed,{element:s});try{s.parentNode.removeChild(s)}catch(i){s.remove()}},Or=function _removeAttribute(s,i){try{X(DOMPurify.removed,{attribute:i.getAttributeNode(s),from:i})}catch(s){X(DOMPurify.removed,{attribute:null,from:i})}if(i.removeAttribute(s),"is"===s&&!At[s])if($t||Ut)try{kr(i)}catch(s){}else try{i.setAttribute(s,"")}catch(s){}},Cr=function _initDocument(s){let i=null,_=null;if(qt)s=""+s;else{const i=ae(s,/^[\r\n\t ]+/);_=i&&i[0]}"application/xhtml+xml"===ur&&sr===ar&&(s=''+s+"");const w=ct?ct.createHTML(s):s;if(sr===ar)try{i=(new Ye).parseFromString(w,ur)}catch(s){}if(!i||!i.documentElement){i=pt.createDocument(sr,"template",null);try{i.documentElement.innerHTML=ir?ut:w}catch(s){}}const x=i.body||i.documentElement;return s&&_&&x.insertBefore(u.createTextNode(_),x.childNodes[0]||null),sr===ar?mt.call(i,Lt?"html":"body")[0]:Lt?i.documentElement:x},Ar=function _createNodeIterator(s){return ht.call(s.ownerDocument||s,s,ze.SHOW_ELEMENT|ze.SHOW_COMMENT|ze.SHOW_TEXT,null)},jr=function _isClobbered(s){return s instanceof He&&("string"!=typeof s.nodeName||"string"!=typeof s.textContent||"function"!=typeof s.removeChild||!(s.attributes instanceof We)||"function"!=typeof s.removeAttribute||"function"!=typeof s.setAttribute||"string"!=typeof s.namespaceURI||"function"!=typeof s.insertBefore||"function"!=typeof s.hasChildNodes)},Ir=function _isNode(s){return"function"==typeof $&&s instanceof $},Pr=function _executeHook(s,i,u){yt[s]&&U(yt[s],(s=>{s.call(DOMPurify,i,u,mr)}))},Nr=function _sanitizeElements(s){let i=null;if(Pr("beforeSanitizeElements",s,null),jr(s))return kr(s),!0;const u=fr(s.nodeName);if(Pr("uponSanitizeElement",s,{tagName:u,allowedTags:Ot}),s.hasChildNodes()&&!Ir(s.firstElementChild)&&pe(/<[/\w]/g,s.innerHTML)&&pe(/<[/\w]/g,s.textContent))return kr(s),!0;if(!Ot[u]||Pt[u]){if(!Pt[u]&&Mr(u)){if(It.tagNameCheck instanceof RegExp&&pe(It.tagNameCheck,u))return!1;if(It.tagNameCheck instanceof Function&&It.tagNameCheck(u))return!1}if(Ht&&!Yt[u]){const i=lt(s)||s.parentNode,u=it(s)||s.childNodes;if(u&&i)for(let _=u.length-1;_>=0;--_)i.insertBefore(at(u[_],!0),st(s))}return kr(s),!0}return s instanceof $e&&!xr(s)?(kr(s),!0):"noscript"!==u&&"noembed"!==u&&"noframes"!==u||!pe(/<\/no(script|embed|frames)/i,s.innerHTML)?(Bt&&3===s.nodeType&&(i=s.textContent,U([vt,bt,_t],(s=>{i=ie(i,s," ")})),s.textContent!==i&&(X(DOMPurify.removed,{element:s.cloneNode()}),s.textContent=i)),Pr("afterSanitizeElements",s,null),!1):(kr(s),!0)},Tr=function _isValidAttribute(s,i,_){if(Wt&&("id"===i||"name"===i)&&(_ in u||_ in gr))return!1;if(Mt&&!Nt[i]&&pe(wt,i));else if(Tt&&pe(Et,i));else if(!At[i]||Nt[i]){if(!(Mr(s)&&(It.tagNameCheck instanceof RegExp&&pe(It.tagNameCheck,s)||It.tagNameCheck instanceof Function&&It.tagNameCheck(s))&&(It.attributeNameCheck instanceof RegExp&&pe(It.attributeNameCheck,i)||It.attributeNameCheck instanceof Function&&It.attributeNameCheck(i))||"is"===i&&It.allowCustomizedBuiltInElements&&(It.tagNameCheck instanceof RegExp&&pe(It.tagNameCheck,_)||It.tagNameCheck instanceof 
Function&&It.tagNameCheck(_))))return!1}else if(er[i]);else if(pe(kt,ie(_,xt,"")));else if("src"!==i&&"xlink:href"!==i&&"href"!==i||"script"===s||0!==le(_,"data:")||!Xt[s])if(Rt&&!pe(St,ie(_,xt,"")));else if(_)return!1;return!0},Mr=function _isBasicCustomElement(s){return s.indexOf("-")>0},Rr=function _sanitizeAttributes(s){Pr("beforeSanitizeAttributes",s,null);const{attributes:i}=s;if(!i)return;const u={attrName:"",attrValue:"",keepAttr:!0,allowedAttributes:At};let _=i.length;for(;_--;){const w=i[_],{name:x,namespaceURI:j,value:P}=w,B=fr(x);let $="value"===x?P:ce(P);if(u.attrName=B,u.attrValue=$,u.keepAttr=!0,u.forceKeepAttr=void 0,Pr("uponSanitizeAttribute",s,u),$=u.attrValue,u.forceKeepAttr)continue;if(Or(x,s),!u.keepAttr)continue;if(!Dt&&pe(/\/>/i,$)){Or(x,s);continue}Bt&&U([vt,bt,_t],(s=>{$=ie($,s," ")}));const X=fr(s.nodeName);if(Tr(X,B,$)){if(!Vt||"id"!==B&&"name"!==B||(Or(x,s),$=Kt+$),ct&&"object"==typeof Xe&&"function"==typeof Xe.getAttributeType)if(j);else switch(Xe.getAttributeType(X,B)){case"TrustedHTML":$=ct.createHTML($);break;case"TrustedScriptURL":$=ct.createScriptURL($)}try{j?s.setAttributeNS(j,x,$):s.setAttribute(x,$),Y(DOMPurify.removed)}catch(s){}}}Pr("afterSanitizeAttributes",s,null)},Dr=function _sanitizeShadowDOM(s){let i=null;const u=Ar(s);for(Pr("beforeSanitizeShadowDOM",s,null);i=u.nextNode();)Pr("uponSanitizeShadowNode",i,null),Nr(i)||(i.content instanceof j&&_sanitizeShadowDOM(i.content),Rr(i));Pr("afterSanitizeShadowDOM",s,null)};return DOMPurify.sanitize=function(s){let i=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},u=null,w=null,x=null,P=null;if(ir=!s,ir&&(s="\x3c!--\x3e"),"string"!=typeof s&&!Ir(s)){if("function"!=typeof s.toString)throw de("toString is not a function");if("string"!=typeof(s=s.toString()))throw de("dirty is not a string, aborting")}if(!DOMPurify.isSupported)return s;if(Ft||vr(i),DOMPurify.removed=[],"string"==typeof s&&(Jt=!1),Jt){if(s.nodeName){const i=fr(s.nodeName);if(!Ot[i]||Pt[i])throw de("root node is forbidden and cannot be sanitized in-place")}}else if(s instanceof $)u=Cr("\x3c!----\x3e"),w=u.ownerDocument.importNode(s,!0),1===w.nodeType&&"BODY"===w.nodeName||"HTML"===w.nodeName?u=w:u.appendChild(w);else{if(!$t&&!Bt&&!Lt&&-1===s.indexOf("<"))return ct&&zt?ct.createHTML(s):s;if(u=Cr(s),!u)return $t?null:zt?ut:""}u&&qt&&kr(u.firstChild);const B=Ar(Jt?s:u);for(;x=B.nextNode();)Nr(x)||(x.content instanceof j&&Dr(x.content),Rr(x));if(Jt)return s;if($t){if(Ut)for(P=dt.call(u.ownerDocument);u.firstChild;)P.appendChild(u.firstChild);else P=u;return(At.shadowroot||At.shadowrootmode)&&(P=gt.call(_,P,!0)),P}let Y=Lt?u.outerHTML:u.innerHTML;return Lt&&Ot["!doctype"]&&u.ownerDocument&&u.ownerDocument.doctype&&u.ownerDocument.doctype.name&&pe(tt,u.ownerDocument.doctype.name)&&(Y="\n"+Y),Bt&&U([vt,bt,_t],(s=>{Y=ie(Y,s," ")})),ct&&zt?ct.createHTML(Y):Y},DOMPurify.setConfig=function(){vr(arguments.length>0&&void 0!==arguments[0]?arguments[0]:{}),Ft=!0},DOMPurify.clearConfig=function(){mr=null,Ft=!1},DOMPurify.isValidAttribute=function(s,i,u){mr||vr({});const _=fr(s),w=fr(i);return Tr(_,w,u)},DOMPurify.addHook=function(s,i){"function"==typeof i&&(yt[s]=yt[s]||[],X(yt[s],i))},DOMPurify.removeHook=function(s){if(yt[s])return Y(yt[s])},DOMPurify.removeHooks=function(s){yt[s]&&(yt[s]=[])},DOMPurify.removeAllHooks=function(){yt={}},DOMPurify}return createDOMPurify()}()},64716:s=>{"use strict";class 
SubRange{constructor(s,i){this.low=s,this.high=i,this.length=1+i-s}overlaps(s){return!(this.highs.high)}touches(s){return!(this.high+1s.high)}add(s){return new SubRange(Math.min(this.low,s.low),Math.max(this.high,s.high))}subtract(s){return s.low<=this.low&&s.high>=this.high?[]:s.low>this.low&&s.highs+i.length),0)}add(s,i){var _add=s=>{for(var i=0;i{for(var i=0;i{for(var i=0;i{for(var u=i.low;u<=i.high;)s.push(u),u++;return s}),[])}subranges(){return this.ranges.map((s=>({low:s.low,high:s.high,length:1+s.high-s.low})))}}s.exports=DRange},84936:s=>{"use strict";var i,u="object"==typeof Reflect?Reflect:null,_=u&&"function"==typeof u.apply?u.apply:function ReflectApply(s,i,u){return Function.prototype.apply.call(s,i,u)};i=u&&"function"==typeof u.ownKeys?u.ownKeys:Object.getOwnPropertySymbols?function ReflectOwnKeys(s){return Object.getOwnPropertyNames(s).concat(Object.getOwnPropertySymbols(s))}:function ReflectOwnKeys(s){return Object.getOwnPropertyNames(s)};var w=Number.isNaN||function NumberIsNaN(s){return s!=s};function EventEmitter(){EventEmitter.init.call(this)}s.exports=EventEmitter,s.exports.once=function once(s,i){return new Promise((function(u,_){function errorListener(u){s.removeListener(i,resolver),_(u)}function resolver(){"function"==typeof s.removeListener&&s.removeListener("error",errorListener),u([].slice.call(arguments))}eventTargetAgnosticAddListener(s,i,resolver,{once:!0}),"error"!==i&&function addErrorHandlerIfEventEmitter(s,i,u){"function"==typeof s.on&&eventTargetAgnosticAddListener(s,"error",i,u)}(s,errorListener,{once:!0})}))},EventEmitter.EventEmitter=EventEmitter,EventEmitter.prototype._events=void 0,EventEmitter.prototype._eventsCount=0,EventEmitter.prototype._maxListeners=void 0;var x=10;function checkListener(s){if("function"!=typeof s)throw new TypeError('The "listener" argument must be of type Function. Received type '+typeof s)}function _getMaxListeners(s){return void 0===s._maxListeners?EventEmitter.defaultMaxListeners:s._maxListeners}function _addListener(s,i,u,_){var w,x,j;if(checkListener(u),void 0===(x=s._events)?(x=s._events=Object.create(null),s._eventsCount=0):(void 0!==x.newListener&&(s.emit("newListener",i,u.listener?u.listener:u),x=s._events),j=x[i]),void 0===j)j=x[i]=u,++s._eventsCount;else if("function"==typeof j?j=x[i]=_?[u,j]:[j,u]:_?j.unshift(u):j.push(u),(w=_getMaxListeners(s))>0&&j.length>w&&!j.warned){j.warned=!0;var P=new Error("Possible EventEmitter memory leak detected. "+j.length+" "+String(i)+" listeners added. Use emitter.setMaxListeners() to increase limit");P.name="MaxListenersExceededWarning",P.emitter=s,P.type=i,P.count=j.length,function ProcessEmitWarning(s){console&&console.warn&&console.warn(s)}(P)}return s}function onceWrapper(){if(!this.fired)return this.target.removeListener(this.type,this.wrapFn),this.fired=!0,0===arguments.length?this.listener.call(this.target):this.listener.apply(this.target,arguments)}function _onceWrap(s,i,u){var _={fired:!1,wrapFn:void 0,target:s,type:i,listener:u},w=onceWrapper.bind(_);return w.listener=u,_.wrapFn=w,w}function _listeners(s,i,u){var _=s._events;if(void 0===_)return[];var w=_[i];return void 0===w?[]:"function"==typeof w?u?[w.listener||w]:[w]:u?function unwrapListeners(s){for(var i=new Array(s.length),u=0;u0&&(j=i[0]),j instanceof Error)throw j;var P=new Error("Unhandled error."+(j?" 
("+j.message+")":""));throw P.context=j,P}var B=x[s];if(void 0===B)return!1;if("function"==typeof B)_(B,this,i);else{var $=B.length,U=arrayClone(B,$);for(u=0;u<$;++u)_(U[u],this,i)}return!0},EventEmitter.prototype.addListener=function addListener(s,i){return _addListener(this,s,i,!1)},EventEmitter.prototype.on=EventEmitter.prototype.addListener,EventEmitter.prototype.prependListener=function prependListener(s,i){return _addListener(this,s,i,!0)},EventEmitter.prototype.once=function once(s,i){return checkListener(i),this.on(s,_onceWrap(this,s,i)),this},EventEmitter.prototype.prependOnceListener=function prependOnceListener(s,i){return checkListener(i),this.prependListener(s,_onceWrap(this,s,i)),this},EventEmitter.prototype.removeListener=function removeListener(s,i){var u,_,w,x,j;if(checkListener(i),void 0===(_=this._events))return this;if(void 0===(u=_[s]))return this;if(u===i||u.listener===i)0==--this._eventsCount?this._events=Object.create(null):(delete _[s],_.removeListener&&this.emit("removeListener",s,u.listener||i));else if("function"!=typeof u){for(w=-1,x=u.length-1;x>=0;x--)if(u[x]===i||u[x].listener===i){j=u[x].listener,w=x;break}if(w<0)return this;0===w?u.shift():function spliceOne(s,i){for(;i+1=0;_--)this.removeListener(s,i[_]);return this},EventEmitter.prototype.listeners=function listeners(s){return _listeners(this,s,!0)},EventEmitter.prototype.rawListeners=function rawListeners(s){return _listeners(this,s,!1)},EventEmitter.listenerCount=function(s,i){return"function"==typeof s.listenerCount?s.listenerCount(i):listenerCount.call(s,i)},EventEmitter.prototype.listenerCount=listenerCount,EventEmitter.prototype.eventNames=function eventNames(){return this._eventsCount>0?i(this._events):[]}},98556:(s,i,u)=>{"use strict";var _=u(43016),w=create(Error);function create(s){return FormattedError.displayName=s.displayName||s.name,FormattedError;function FormattedError(i){return i&&(i=_.apply(null,arguments)),new s(i)}}s.exports=w,w.eval=create(EvalError),w.range=create(RangeError),w.reference=create(ReferenceError),w.syntax=create(SyntaxError),w.type=create(TypeError),w.uri=create(URIError),w.create=create},43016:s=>{!function(){var i;function format(s){for(var i,u,_,w,x=1,j=[].slice.call(arguments),P=0,B=s.length,$="",U=!1,Y=!1,nextArg=function(){return j[x++]},slurpNumber=function(){for(var u="";/\d/.test(s[P]);)u+=s[P++],i=s[P];return u.length>0?parseInt(u):null};P{"use strict";var i=Object.prototype.toString,u=Math.max,_=function concatty(s,i){for(var u=[],_=0;_{"use strict";var _=u(97840);s.exports=Function.prototype.bind||_},96604:(s,i,u)=>{"use strict";var _,w=SyntaxError,x=Function,j=TypeError,getEvalledConstructor=function(s){try{return x('"use strict"; return ('+s+").constructor;")()}catch(s){}},P=Object.getOwnPropertyDescriptor;if(P)try{P({},"")}catch(s){P=null}var throwTypeError=function(){throw new j},B=P?function(){try{return throwTypeError}catch(s){try{return P(arguments,"callee").get}catch(s){return throwTypeError}}}():throwTypeError,$=u(24308)(),U=u(37636)(),Y=Object.getPrototypeOf||(U?function(s){return s.__proto__}:null),X={},Z="undefined"!=typeof Uint8Array&&Y?Y(Uint8Array):_,ee={"%AggregateError%":"undefined"==typeof AggregateError?_:AggregateError,"%Array%":Array,"%ArrayBuffer%":"undefined"==typeof ArrayBuffer?_:ArrayBuffer,"%ArrayIteratorPrototype%":$&&Y?Y([][Symbol.iterator]()):_,"%AsyncFromSyncIteratorPrototype%":_,"%AsyncFunction%":X,"%AsyncGenerator%":X,"%AsyncGeneratorFunction%":X,"%AsyncIteratorPrototype%":X,"%Atomics%":"undefined"==typeof 
Atomics?_:Atomics,"%BigInt%":"undefined"==typeof BigInt?_:BigInt,"%BigInt64Array%":"undefined"==typeof BigInt64Array?_:BigInt64Array,"%BigUint64Array%":"undefined"==typeof BigUint64Array?_:BigUint64Array,"%Boolean%":Boolean,"%DataView%":"undefined"==typeof DataView?_:DataView,"%Date%":Date,"%decodeURI%":decodeURI,"%decodeURIComponent%":decodeURIComponent,"%encodeURI%":encodeURI,"%encodeURIComponent%":encodeURIComponent,"%Error%":Error,"%eval%":eval,"%EvalError%":EvalError,"%Float32Array%":"undefined"==typeof Float32Array?_:Float32Array,"%Float64Array%":"undefined"==typeof Float64Array?_:Float64Array,"%FinalizationRegistry%":"undefined"==typeof FinalizationRegistry?_:FinalizationRegistry,"%Function%":x,"%GeneratorFunction%":X,"%Int8Array%":"undefined"==typeof Int8Array?_:Int8Array,"%Int16Array%":"undefined"==typeof Int16Array?_:Int16Array,"%Int32Array%":"undefined"==typeof Int32Array?_:Int32Array,"%isFinite%":isFinite,"%isNaN%":isNaN,"%IteratorPrototype%":$&&Y?Y(Y([][Symbol.iterator]())):_,"%JSON%":"object"==typeof JSON?JSON:_,"%Map%":"undefined"==typeof Map?_:Map,"%MapIteratorPrototype%":"undefined"!=typeof Map&&$&&Y?Y((new Map)[Symbol.iterator]()):_,"%Math%":Math,"%Number%":Number,"%Object%":Object,"%parseFloat%":parseFloat,"%parseInt%":parseInt,"%Promise%":"undefined"==typeof Promise?_:Promise,"%Proxy%":"undefined"==typeof Proxy?_:Proxy,"%RangeError%":RangeError,"%ReferenceError%":ReferenceError,"%Reflect%":"undefined"==typeof Reflect?_:Reflect,"%RegExp%":RegExp,"%Set%":"undefined"==typeof Set?_:Set,"%SetIteratorPrototype%":"undefined"!=typeof Set&&$&&Y?Y((new Set)[Symbol.iterator]()):_,"%SharedArrayBuffer%":"undefined"==typeof SharedArrayBuffer?_:SharedArrayBuffer,"%String%":String,"%StringIteratorPrototype%":$&&Y?Y(""[Symbol.iterator]()):_,"%Symbol%":$?Symbol:_,"%SyntaxError%":w,"%ThrowTypeError%":B,"%TypedArray%":Z,"%TypeError%":j,"%Uint8Array%":"undefined"==typeof Uint8Array?_:Uint8Array,"%Uint8ClampedArray%":"undefined"==typeof Uint8ClampedArray?_:Uint8ClampedArray,"%Uint16Array%":"undefined"==typeof Uint16Array?_:Uint16Array,"%Uint32Array%":"undefined"==typeof Uint32Array?_:Uint32Array,"%URIError%":URIError,"%WeakMap%":"undefined"==typeof WeakMap?_:WeakMap,"%WeakRef%":"undefined"==typeof WeakRef?_:WeakRef,"%WeakSet%":"undefined"==typeof WeakSet?_:WeakSet};if(Y)try{null.error}catch(s){var ae=Y(Y(s));ee["%Error.prototype%"]=ae}var ie=function doEval(s){var i;if("%AsyncFunction%"===s)i=getEvalledConstructor("async function () {}");else if("%GeneratorFunction%"===s)i=getEvalledConstructor("function* () {}");else if("%AsyncGeneratorFunction%"===s)i=getEvalledConstructor("async function* () {}");else if("%AsyncGenerator%"===s){var u=doEval("%AsyncGeneratorFunction%");u&&(i=u.prototype)}else if("%AsyncIteratorPrototype%"===s){var _=doEval("%AsyncGenerator%");_&&Y&&(i=Y(_.prototype))}return 
ee[s]=i,i},le={"%ArrayBufferPrototype%":["ArrayBuffer","prototype"],"%ArrayPrototype%":["Array","prototype"],"%ArrayProto_entries%":["Array","prototype","entries"],"%ArrayProto_forEach%":["Array","prototype","forEach"],"%ArrayProto_keys%":["Array","prototype","keys"],"%ArrayProto_values%":["Array","prototype","values"],"%AsyncFunctionPrototype%":["AsyncFunction","prototype"],"%AsyncGenerator%":["AsyncGeneratorFunction","prototype"],"%AsyncGeneratorPrototype%":["AsyncGeneratorFunction","prototype","prototype"],"%BooleanPrototype%":["Boolean","prototype"],"%DataViewPrototype%":["DataView","prototype"],"%DatePrototype%":["Date","prototype"],"%ErrorPrototype%":["Error","prototype"],"%EvalErrorPrototype%":["EvalError","prototype"],"%Float32ArrayPrototype%":["Float32Array","prototype"],"%Float64ArrayPrototype%":["Float64Array","prototype"],"%FunctionPrototype%":["Function","prototype"],"%Generator%":["GeneratorFunction","prototype"],"%GeneratorPrototype%":["GeneratorFunction","prototype","prototype"],"%Int8ArrayPrototype%":["Int8Array","prototype"],"%Int16ArrayPrototype%":["Int16Array","prototype"],"%Int32ArrayPrototype%":["Int32Array","prototype"],"%JSONParse%":["JSON","parse"],"%JSONStringify%":["JSON","stringify"],"%MapPrototype%":["Map","prototype"],"%NumberPrototype%":["Number","prototype"],"%ObjectPrototype%":["Object","prototype"],"%ObjProto_toString%":["Object","prototype","toString"],"%ObjProto_valueOf%":["Object","prototype","valueOf"],"%PromisePrototype%":["Promise","prototype"],"%PromiseProto_then%":["Promise","prototype","then"],"%Promise_all%":["Promise","all"],"%Promise_reject%":["Promise","reject"],"%Promise_resolve%":["Promise","resolve"],"%RangeErrorPrototype%":["RangeError","prototype"],"%ReferenceErrorPrototype%":["ReferenceError","prototype"],"%RegExpPrototype%":["RegExp","prototype"],"%SetPrototype%":["Set","prototype"],"%SharedArrayBufferPrototype%":["SharedArrayBuffer","prototype"],"%StringPrototype%":["String","prototype"],"%SymbolPrototype%":["Symbol","prototype"],"%SyntaxErrorPrototype%":["SyntaxError","prototype"],"%TypedArrayPrototype%":["TypedArray","prototype"],"%TypeErrorPrototype%":["TypeError","prototype"],"%Uint8ArrayPrototype%":["Uint8Array","prototype"],"%Uint8ClampedArrayPrototype%":["Uint8ClampedArray","prototype"],"%Uint16ArrayPrototype%":["Uint16Array","prototype"],"%Uint32ArrayPrototype%":["Uint32Array","prototype"],"%URIErrorPrototype%":["URIError","prototype"],"%WeakMapPrototype%":["WeakMap","prototype"],"%WeakSetPrototype%":["WeakSet","prototype"]},ce=u(30268),pe=u(15176),de=ce.call(Function.call,Array.prototype.concat),fe=ce.call(Function.apply,Array.prototype.splice),ye=ce.call(Function.call,String.prototype.replace),be=ce.call(Function.call,String.prototype.slice),_e=ce.call(Function.call,RegExp.prototype.exec),we=/[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g,Se=/\\(\\)?/g,xe=function getBaseIntrinsic(s,i){var u,_=s;if(pe(le,_)&&(_="%"+(u=le[_])[0]+"%"),pe(ee,_)){var x=ee[_];if(x===X&&(x=ie(_)),void 0===x&&!i)throw new j("intrinsic "+s+" exists, but is not available. 
Please file an issue!");return{alias:u,name:_,value:x}}throw new w("intrinsic "+s+" does not exist!")};s.exports=function GetIntrinsic(s,i){if("string"!=typeof s||0===s.length)throw new j("intrinsic name must be a non-empty string");if(arguments.length>1&&"boolean"!=typeof i)throw new j('"allowMissing" argument must be a boolean');if(null===_e(/^%?[^%]*%?$/,s))throw new w("`%` may not be present anywhere but at the beginning and end of the intrinsic name");var u=function stringToPath(s){var i=be(s,0,1),u=be(s,-1);if("%"===i&&"%"!==u)throw new w("invalid intrinsic syntax, expected closing `%`");if("%"===u&&"%"!==i)throw new w("invalid intrinsic syntax, expected opening `%`");var _=[];return ye(s,we,(function(s,i,u,w){_[_.length]=u?ye(w,Se,"$1"):i||s})),_}(s),_=u.length>0?u[0]:"",x=xe("%"+_+"%",i),B=x.name,$=x.value,U=!1,Y=x.alias;Y&&(_=Y[0],fe(u,de([0,1],Y)));for(var X=1,Z=!0;X=u.length){var ce=P($,ae);$=(Z=!!ce)&&"get"in ce&&!("originalValue"in ce.get)?ce.get:$[ae]}else Z=pe($,ae),$=$[ae];Z&&!U&&(ee[B]=$)}}return $}},80872:(s,i,u)=>{"use strict";var _=u(96604)("%Object.getOwnPropertyDescriptor%",!0);if(_)try{_([],"length")}catch(s){_=null}s.exports=_},71188:(s,i,u)=>{"use strict";var _=u(96604)("%Object.defineProperty%",!0),w=function hasPropertyDescriptors(){if(_)try{return _({},"a",{value:1}),!0}catch(s){return!1}return!1};w.hasArrayLengthDefineBug=function hasArrayLengthDefineBug(){if(!w())return null;try{return 1!==_([],"length",{value:1}).length}catch(s){return!0}},s.exports=w},37636:s=>{"use strict";var i={foo:{}},u=Object;s.exports=function hasProto(){return{__proto__:i}.foo===i.foo&&!({__proto__:null}instanceof u)}},24308:(s,i,u)=>{"use strict";var _="undefined"!=typeof Symbol&&Symbol,w=u(31968);s.exports=function hasNativeSymbols(){return"function"==typeof _&&("function"==typeof Symbol&&("symbol"==typeof _("foo")&&("symbol"==typeof Symbol("bar")&&w())))}},31968:s=>{"use strict";s.exports=function hasSymbols(){if("function"!=typeof Symbol||"function"!=typeof Object.getOwnPropertySymbols)return!1;if("symbol"==typeof Symbol.iterator)return!0;var s={},i=Symbol("test"),u=Object(i);if("string"==typeof i)return!1;if("[object Symbol]"!==Object.prototype.toString.call(i))return!1;if("[object Symbol]"!==Object.prototype.toString.call(u))return!1;for(i in s[i]=42,s)return!1;if("function"==typeof Object.keys&&0!==Object.keys(s).length)return!1;if("function"==typeof Object.getOwnPropertyNames&&0!==Object.getOwnPropertyNames(s).length)return!1;var _=Object.getOwnPropertySymbols(s);if(1!==_.length||_[0]!==i)return!1;if(!Object.prototype.propertyIsEnumerable.call(s,i))return!1;if("function"==typeof Object.getOwnPropertyDescriptor){var w=Object.getOwnPropertyDescriptor(s,i);if(42!==w.value||!0!==w.enumerable)return!1}return!0}},15176:(s,i,u)=>{"use strict";var _=Function.prototype.call,w=Object.prototype.hasOwnProperty,x=u(30268);s.exports=x.call(_,w)},4728:s=>{function deepFreeze(s){return s instanceof Map?s.clear=s.delete=s.set=function(){throw new Error("map is read-only")}:s instanceof Set&&(s.add=s.clear=s.delete=function(){throw new Error("set is read-only")}),Object.freeze(s),Object.getOwnPropertyNames(s).forEach((function(i){var u=s[i];"object"!=typeof u||Object.isFrozen(u)||deepFreeze(u)})),s}var i=deepFreeze,u=deepFreeze;i.default=u;class Response{constructor(s){void 0===s.data&&(s.data={}),this.data=s.data,this.isMatchIgnored=!1}ignoreMatch(){this.isMatchIgnored=!0}}function escapeHTML(s){return s.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}function 
inherit(s,...i){const u=Object.create(null);for(const i in s)u[i]=s[i];return i.forEach((function(s){for(const i in s)u[i]=s[i]})),u}const emitsWrappingTags=s=>!!s.kind;class HTMLRenderer{constructor(s,i){this.buffer="",this.classPrefix=i.classPrefix,s.walk(this)}addText(s){this.buffer+=escapeHTML(s)}openNode(s){if(!emitsWrappingTags(s))return;let i=s.kind;s.sublanguage||(i=`${this.classPrefix}${i}`),this.span(i)}closeNode(s){emitsWrappingTags(s)&&(this.buffer+="")}value(){return this.buffer}span(s){this.buffer+=``}}class TokenTree{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(s){this.top.children.push(s)}openNode(s){const i={kind:s,children:[]};this.add(i),this.stack.push(i)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(s){return this.constructor._walk(s,this.rootNode)}static _walk(s,i){return"string"==typeof i?s.addText(i):i.children&&(s.openNode(i),i.children.forEach((i=>this._walk(s,i))),s.closeNode(i)),s}static _collapse(s){"string"!=typeof s&&s.children&&(s.children.every((s=>"string"==typeof s))?s.children=[s.children.join("")]:s.children.forEach((s=>{TokenTree._collapse(s)})))}}class TokenTreeEmitter extends TokenTree{constructor(s){super(),this.options=s}addKeyword(s,i){""!==s&&(this.openNode(i),this.addText(s),this.closeNode())}addText(s){""!==s&&this.add(s)}addSublanguage(s,i){const u=s.root;u.kind=i,u.sublanguage=!0,this.add(u)}toHTML(){return new HTMLRenderer(this,this.options).value()}finalize(){return!0}}function source(s){return s?"string"==typeof s?s:s.source:null}const _=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./;const w="[a-zA-Z]\\w*",x="[a-zA-Z_]\\w*",j="\\b\\d+(\\.\\d+)?",P="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",B="\\b(0b[01]+)",$={begin:"\\\\[\\s\\S]",relevance:0},U={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[$]},Y={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[$]},X={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},COMMENT=function(s,i,u={}){const _=inherit({className:"comment",begin:s,end:i,contains:[]},u);return _.contains.push(X),_.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),_},Z=COMMENT("//","$"),ee=COMMENT("/\\*","\\*/"),ae=COMMENT("#","$"),ie={className:"number",begin:j,relevance:0},le={className:"number",begin:P,relevance:0},ce={className:"number",begin:B,relevance:0},pe={className:"number",begin:j+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},de={begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[$,{begin:/\[/,end:/\]/,relevance:0,contains:[$]}]}]},fe={className:"title",begin:w,relevance:0},ye={className:"title",begin:x,relevance:0},be={begin:"\\.\\s*"+x,relevance:0};var _e=Object.freeze({__proto__:null,MATCH_NOTHING_RE:/\b\B/,IDENT_RE:w,UNDERSCORE_IDENT_RE:x,NUMBER_RE:j,C_NUMBER_RE:P,BINARY_NUMBER_RE:B,RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(s={})=>{const i=/^#![ ]*\//;return s.binary&&(s.begin=function concat(...s){return 
s.map((s=>source(s))).join("")}(i,/.*\b/,s.binary,/\b.*/)),inherit({className:"meta",begin:i,end:/$/,relevance:0,"on:begin":(s,i)=>{0!==s.index&&i.ignoreMatch()}},s)},BACKSLASH_ESCAPE:$,APOS_STRING_MODE:U,QUOTE_STRING_MODE:Y,PHRASAL_WORDS_MODE:X,COMMENT,C_LINE_COMMENT_MODE:Z,C_BLOCK_COMMENT_MODE:ee,HASH_COMMENT_MODE:ae,NUMBER_MODE:ie,C_NUMBER_MODE:le,BINARY_NUMBER_MODE:ce,CSS_NUMBER_MODE:pe,REGEXP_MODE:de,TITLE_MODE:fe,UNDERSCORE_TITLE_MODE:ye,METHOD_GUARD:be,END_SAME_AS_BEGIN:function(s){return Object.assign(s,{"on:begin":(s,i)=>{i.data._beginMatch=s[1]},"on:end":(s,i)=>{i.data._beginMatch!==s[1]&&i.ignoreMatch()}})}});function skipIfhasPrecedingDot(s,i){"."===s.input[s.index-1]&&i.ignoreMatch()}function beginKeywords(s,i){i&&s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?!\\.)(?=\\b|\\s)",s.__beforeBegin=skipIfhasPrecedingDot,s.keywords=s.keywords||s.beginKeywords,delete s.beginKeywords,void 0===s.relevance&&(s.relevance=0))}function compileIllegal(s,i){Array.isArray(s.illegal)&&(s.illegal=function either(...s){return"("+s.map((s=>source(s))).join("|")+")"}(...s.illegal))}function compileMatch(s,i){if(s.match){if(s.begin||s.end)throw new Error("begin & end are not supported with match");s.begin=s.match,delete s.match}}function compileRelevance(s,i){void 0===s.relevance&&(s.relevance=1)}const we=["of","and","for","in","not","or","if","then","parent","list","value"],Se="keyword";function compileKeywords(s,i,u=Se){const _={};return"string"==typeof s?compileList(u,s.split(" ")):Array.isArray(s)?compileList(u,s):Object.keys(s).forEach((function(u){Object.assign(_,compileKeywords(s[u],i,u))})),_;function compileList(s,u){i&&(u=u.map((s=>s.toLowerCase()))),u.forEach((function(i){const u=i.split("|");_[u[0]]=[s,scoreForKeyword(u[0],u[1])]}))}}function scoreForKeyword(s,i){return i?Number(i):function commonKeyword(s){return we.includes(s.toLowerCase())}(s)?0:1}function compileLanguage(s,{plugins:i}){function langRe(i,u){return new RegExp(source(i),"m"+(s.case_insensitive?"i":"")+(u?"g":""))}class MultiRegex{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(s,i){i.position=this.position++,this.matchIndexes[this.matchAt]=i,this.regexes.push([i,s]),this.matchAt+=function countMatchGroups(s){return new RegExp(s.toString()+"|").exec("").length-1}(s)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const s=this.regexes.map((s=>s[1]));this.matcherRe=langRe(function join(s,i="|"){let u=0;return s.map((s=>{u+=1;const i=u;let w=source(s),x="";for(;w.length>0;){const s=_.exec(w);if(!s){x+=w;break}x+=w.substring(0,s.index),w=w.substring(s.index+s[0].length),"\\"===s[0][0]&&s[1]?x+="\\"+String(Number(s[1])+i):(x+=s[0],"("===s[0]&&u++)}return x})).map((s=>`(${s})`)).join(i)}(s),!0),this.lastIndex=0}exec(s){this.matcherRe.lastIndex=this.lastIndex;const i=this.matcherRe.exec(s);if(!i)return null;const u=i.findIndex(((s,i)=>i>0&&void 0!==s)),_=this.matchIndexes[u];return i.splice(0,u),Object.assign(i,_)}}class ResumableMultiRegex{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(s){if(this.multiRegexes[s])return this.multiRegexes[s];const i=new MultiRegex;return this.rules.slice(s).forEach((([s,u])=>i.addRule(s,u))),i.compile(),this.multiRegexes[s]=i,i}resumingScanAtSamePosition(){return 0!==this.regexIndex}considerAll(){this.regexIndex=0}addRule(s,i){this.rules.push([s,i]),"begin"===i.type&&this.count++}exec(s){const 
i=this.getMatcher(this.regexIndex);i.lastIndex=this.lastIndex;let u=i.exec(s);if(this.resumingScanAtSamePosition())if(u&&u.index===this.lastIndex);else{const i=this.getMatcher(0);i.lastIndex=this.lastIndex+1,u=i.exec(s)}return u&&(this.regexIndex+=u.position+1,this.regexIndex===this.count&&this.considerAll()),u}}if(s.compilerExtensions||(s.compilerExtensions=[]),s.contains&&s.contains.includes("self"))throw new Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.");return s.classNameAliases=inherit(s.classNameAliases||{}),function compileMode(i,u){const _=i;if(i.isCompiled)return _;[compileMatch].forEach((s=>s(i,u))),s.compilerExtensions.forEach((s=>s(i,u))),i.__beforeBegin=null,[beginKeywords,compileIllegal,compileRelevance].forEach((s=>s(i,u))),i.isCompiled=!0;let w=null;if("object"==typeof i.keywords&&(w=i.keywords.$pattern,delete i.keywords.$pattern),i.keywords&&(i.keywords=compileKeywords(i.keywords,s.case_insensitive)),i.lexemes&&w)throw new Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. (see mode reference) ");return w=w||i.lexemes||/\w+/,_.keywordPatternRe=langRe(w,!0),u&&(i.begin||(i.begin=/\B|\b/),_.beginRe=langRe(i.begin),i.endSameAsBegin&&(i.end=i.begin),i.end||i.endsWithParent||(i.end=/\B|\b/),i.end&&(_.endRe=langRe(i.end)),_.terminatorEnd=source(i.end)||"",i.endsWithParent&&u.terminatorEnd&&(_.terminatorEnd+=(i.end?"|":"")+u.terminatorEnd)),i.illegal&&(_.illegalRe=langRe(i.illegal)),i.contains||(i.contains=[]),i.contains=[].concat(...i.contains.map((function(s){return function expandOrCloneMode(s){s.variants&&!s.cachedVariants&&(s.cachedVariants=s.variants.map((function(i){return inherit(s,{variants:null},i)})));if(s.cachedVariants)return s.cachedVariants;if(dependencyOnParent(s))return inherit(s,{starts:s.starts?inherit(s.starts):null});if(Object.isFrozen(s))return inherit(s);return s}("self"===s?i:s)}))),i.contains.forEach((function(s){compileMode(s,_)})),i.starts&&compileMode(i.starts,u),_.matcher=function buildModeRegex(s){const i=new ResumableMultiRegex;return s.contains.forEach((s=>i.addRule(s.begin,{rule:s,type:"begin"}))),s.terminatorEnd&&i.addRule(s.terminatorEnd,{type:"end"}),s.illegal&&i.addRule(s.illegal,{type:"illegal"}),i}(_),_}(s)}function dependencyOnParent(s){return!!s&&(s.endsWithParent||dependencyOnParent(s.starts))}function BuildVuePlugin(s){const i={props:["language","code","autodetect"],data:function(){return{detectedLanguage:"",unknownLanguage:!1}},computed:{className(){return this.unknownLanguage?"":"hljs "+this.detectedLanguage},highlighted(){if(!this.autoDetect&&!s.getLanguage(this.language))return console.warn(`The language "${this.language}" you specified could not be found.`),this.unknownLanguage=!0,escapeHTML(this.code);let i={};return this.autoDetect?(i=s.highlightAuto(this.code),this.detectedLanguage=i.language):(i=s.highlight(this.language,this.code,this.ignoreIllegals),this.detectedLanguage=this.language),i.value},autoDetect(){return!this.language||function hasValueOrEmptyAttribute(s){return Boolean(s||""===s)}(this.autodetect)},ignoreIllegals:()=>!0},render(s){return s("pre",{},[s("code",{class:this.className,domProps:{innerHTML:this.highlighted}})])}};return{Component:i,VuePlugin:{install(s){s.component("highlightjs",i)}}}}const xe={"after:highlightElement":({el:s,result:i,text:u})=>{const _=nodeStream(s);if(!_.length)return;const w=document.createElement("div");w.innerHTML=i.value,i.value=function mergeStreams(s,i,u){let _=0,w="";const x=[];function 
selectStream(){return s.length&&i.length?s[0].offset!==i[0].offset?s[0].offset"}function close(s){w+=""}function render(s){("start"===s.event?open:close)(s.node)}for(;s.length||i.length;){let i=selectStream();if(w+=escapeHTML(u.substring(_,i[0].offset)),_=i[0].offset,i===s){x.reverse().forEach(close);do{render(i.splice(0,1)[0]),i=selectStream()}while(i===s&&i.length&&i[0].offset===_);x.reverse().forEach(open)}else"start"===i[0].event?x.push(i[0].node):x.pop(),render(i.splice(0,1)[0])}return w+escapeHTML(u.substr(_))}(_,nodeStream(w),u)}};function tag(s){return s.nodeName.toLowerCase()}function nodeStream(s){const i=[];return function _nodeStream(s,u){for(let _=s.firstChild;_;_=_.nextSibling)3===_.nodeType?u+=_.nodeValue.length:1===_.nodeType&&(i.push({event:"start",offset:u,node:_}),u=_nodeStream(_,u),tag(_).match(/br|hr|img|input/)||i.push({event:"stop",offset:u,node:_}));return u}(s,0),i}const Pe={},error=s=>{console.error(s)},warn=(s,...i)=>{console.log(`WARN: ${s}`,...i)},deprecated=(s,i)=>{Pe[`${s}/${i}`]||(console.log(`Deprecated as of ${s}. ${i}`),Pe[`${s}/${i}`]=!0)},Te=escapeHTML,Re=inherit,qe=Symbol("nomatch");var $e=function(s){const u=Object.create(null),_=Object.create(null),w=[];let x=!0;const j=/(^(<[^>]+>|\t|)+|\n)/gm,P="Could not find the language '{}', did you forget to load/include a language module?",B={disableAutodetect:!0,name:"Plain text",contains:[]};let $={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:TokenTreeEmitter};function shouldNotHighlight(s){return $.noHighlightRe.test(s)}function highlight(s,i,u,_){let w="",x="";"object"==typeof i?(w=s,u=i.ignoreIllegals,x=i.language,_=void 0):(deprecated("10.7.0","highlight(lang, code, ...args) has been deprecated."),deprecated("10.7.0","Please use highlight(code, options) instead.\nhttps://github.com/highlightjs/highlight.js/issues/2277"),x=s,w=i);const j={code:w,language:x};fire("before:highlight",j);const P=j.result?j.result:_highlight(j.language,j.code,u,_);return P.code=j.code,fire("after:highlight",P),P}function _highlight(s,i,_,j){function keywordData(s,i){const u=U.case_insensitive?i[0].toLowerCase():i[0];return Object.prototype.hasOwnProperty.call(s.keywords,u)&&s.keywords[u]}function processBuffer(){null!=Z.subLanguage?function processSubLanguage(){if(""===ie)return;let s=null;if("string"==typeof Z.subLanguage){if(!u[Z.subLanguage])return void ae.addText(ie);s=_highlight(Z.subLanguage,ie,!0,ee[Z.subLanguage]),ee[Z.subLanguage]=s.top}else s=highlightAuto(ie,Z.subLanguage.length?Z.subLanguage:null);Z.relevance>0&&(le+=s.relevance),ae.addSublanguage(s.emitter,s.language)}():function processKeywords(){if(!Z.keywords)return void ae.addText(ie);let s=0;Z.keywordPatternRe.lastIndex=0;let i=Z.keywordPatternRe.exec(ie),u="";for(;i;){u+=ie.substring(s,i.index);const _=keywordData(Z,i);if(_){const[s,w]=_;if(ae.addText(u),u="",le+=w,s.startsWith("_"))u+=i[0];else{const u=U.classNameAliases[s]||s;ae.addKeyword(i[0],u)}}else u+=i[0];s=Z.keywordPatternRe.lastIndex,i=Z.keywordPatternRe.exec(ie)}u+=ie.substr(s),ae.addText(u)}(),ie=""}function startNewMode(s){return s.className&&ae.openNode(U.classNameAliases[s.className]||s.className),Z=Object.create(s,{parent:{value:Z}}),Z}function endOfMode(s,i,u){let _=function startsWith(s,i){const u=s&&s.exec(i);return u&&0===u.index}(s.endRe,u);if(_){if(s["on:end"]){const u=new Response(s);s["on:end"](i,u),u.isMatchIgnored&&(_=!1)}if(_){for(;s.endsParent&&s.parent;)s=s.parent;return 
s}}if(s.endsWithParent)return endOfMode(s.parent,i,u)}function doIgnore(s){return 0===Z.matcher.regexIndex?(ie+=s[0],1):(de=!0,0)}function doBeginMatch(s){const i=s[0],u=s.rule,_=new Response(u),w=[u.__beforeBegin,u["on:begin"]];for(const u of w)if(u&&(u(s,_),_.isMatchIgnored))return doIgnore(i);return u&&u.endSameAsBegin&&(u.endRe=function escape(s){return new RegExp(s.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")}(i)),u.skip?ie+=i:(u.excludeBegin&&(ie+=i),processBuffer(),u.returnBegin||u.excludeBegin||(ie=i)),startNewMode(u),u.returnBegin?0:i.length}function doEndMatch(s){const u=s[0],_=i.substr(s.index),w=endOfMode(Z,s,_);if(!w)return qe;const x=Z;x.skip?ie+=u:(x.returnEnd||x.excludeEnd||(ie+=u),processBuffer(),x.excludeEnd&&(ie=u));do{Z.className&&ae.closeNode(),Z.skip||Z.subLanguage||(le+=Z.relevance),Z=Z.parent}while(Z!==w.parent);return w.starts&&(w.endSameAsBegin&&(w.starts.endRe=w.endRe),startNewMode(w.starts)),x.returnEnd?0:u.length}let B={};function processLexeme(u,w){const j=w&&w[0];if(ie+=u,null==j)return processBuffer(),0;if("begin"===B.type&&"end"===w.type&&B.index===w.index&&""===j){if(ie+=i.slice(w.index,w.index+1),!x){const i=new Error("0 width match regex");throw i.languageName=s,i.badRule=B.rule,i}return 1}if(B=w,"begin"===w.type)return doBeginMatch(w);if("illegal"===w.type&&!_){const s=new Error('Illegal lexeme "'+j+'" for mode "'+(Z.className||"")+'"');throw s.mode=Z,s}if("end"===w.type){const s=doEndMatch(w);if(s!==qe)return s}if("illegal"===w.type&&""===j)return 1;if(pe>1e5&&pe>3*w.index){throw new Error("potential infinite loop, way more iterations than matches")}return ie+=j,j.length}const U=getLanguage(s);if(!U)throw error(P.replace("{}",s)),new Error('Unknown language: "'+s+'"');const Y=compileLanguage(U,{plugins:w});let X="",Z=j||Y;const ee={},ae=new $.__emitter($);!function processContinuations(){const s=[];for(let i=Z;i!==U;i=i.parent)i.className&&s.unshift(i.className);s.forEach((s=>ae.openNode(s)))}();let ie="",le=0,ce=0,pe=0,de=!1;try{for(Z.matcher.considerAll();;){pe++,de?de=!1:Z.matcher.considerAll(),Z.matcher.lastIndex=ce;const s=Z.matcher.exec(i);if(!s)break;const u=processLexeme(i.substring(ce,s.index),s);ce=s.index+u}return processLexeme(i.substr(ce)),ae.closeAllNodes(),ae.finalize(),X=ae.toHTML(),{relevance:Math.floor(le),value:X,language:s,illegal:!1,emitter:ae,top:Z}}catch(u){if(u.message&&u.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:u.message,context:i.slice(ce-100,ce+100),mode:u.mode},sofar:X,relevance:0,value:Te(i),emitter:ae};if(x)return{illegal:!1,relevance:0,value:Te(i),emitter:ae,language:s,top:Z,errorRaised:u};throw u}}function highlightAuto(s,i){i=i||$.languages||Object.keys(u);const _=function justTextHighlightResult(s){const i={relevance:0,emitter:new $.__emitter($),value:Te(s),illegal:!1,top:B};return i.emitter.addText(s),i}(s),w=i.filter(getLanguage).filter(autoDetection).map((i=>_highlight(i,s,!1)));w.unshift(_);const x=w.sort(((s,i)=>{if(s.relevance!==i.relevance)return i.relevance-s.relevance;if(s.language&&i.language){if(getLanguage(s.language).supersetOf===i.language)return 1;if(getLanguage(i.language).supersetOf===s.language)return-1}return 0})),[j,P]=x,U=j;return U.second_best=P,U}const U={"before:highlightElement":({el:s})=>{$.useBR&&(s.innerHTML=s.innerHTML.replace(/\n/g,"").replace(//g,"\n"))},"after:highlightElement":({result:s})=>{$.useBR&&(s.value=s.value.replace(/\n/g,"
"))}},Y=/^(<[^>]+>|\t)+/gm,X={"after:highlightElement":({result:s})=>{$.tabReplace&&(s.value=s.value.replace(Y,(s=>s.replace(/\t/g,$.tabReplace))))}};function highlightElement(s){let i=null;const u=function blockLanguage(s){let i=s.className+" ";i+=s.parentNode?s.parentNode.className:"";const u=$.languageDetectRe.exec(i);if(u){const i=getLanguage(u[1]);return i||(warn(P.replace("{}",u[1])),warn("Falling back to no-highlight mode for this block.",s)),i?u[1]:"no-highlight"}return i.split(/\s+/).find((s=>shouldNotHighlight(s)||getLanguage(s)))}(s);if(shouldNotHighlight(u))return;fire("before:highlightElement",{el:s,language:u}),i=s;const w=i.textContent,x=u?highlight(w,{language:u,ignoreIllegals:!0}):highlightAuto(w);fire("after:highlightElement",{el:s,result:x,text:w}),s.innerHTML=x.value,function updateClassName(s,i,u){const w=i?_[i]:u;s.classList.add("hljs"),w&&s.classList.add(w)}(s,u,x.language),s.result={language:x.language,re:x.relevance,relavance:x.relevance},x.second_best&&(s.second_best={language:x.second_best.language,re:x.second_best.relevance,relavance:x.second_best.relevance})}const initHighlighting=()=>{if(initHighlighting.called)return;initHighlighting.called=!0,deprecated("10.6.0","initHighlighting() is deprecated. Use highlightAll() instead.");document.querySelectorAll("pre code").forEach(highlightElement)};let Z=!1;function highlightAll(){if("loading"===document.readyState)return void(Z=!0);document.querySelectorAll("pre code").forEach(highlightElement)}function getLanguage(s){return s=(s||"").toLowerCase(),u[s]||u[_[s]]}function registerAliases(s,{languageName:i}){"string"==typeof s&&(s=[s]),s.forEach((s=>{_[s.toLowerCase()]=i}))}function autoDetection(s){const i=getLanguage(s);return i&&!i.disableAutodetect}function fire(s,i){const u=s;w.forEach((function(s){s[u]&&s[u](i)}))}"undefined"!=typeof window&&window.addEventListener&&window.addEventListener("DOMContentLoaded",(function boot(){Z&&highlightAll()}),!1),Object.assign(s,{highlight,highlightAuto,highlightAll,fixMarkup:function deprecateFixMarkup(s){return deprecated("10.2.0","fixMarkup will be removed entirely in v11.0"),deprecated("10.2.0","Please see https://github.com/highlightjs/highlight.js/issues/2534"),function fixMarkup(s){return $.tabReplace||$.useBR?s.replace(j,(s=>"\n"===s?$.useBR?"
":s:$.tabReplace?s.replace(/\t/g,$.tabReplace):s)):s}(s)},highlightElement,highlightBlock:function deprecateHighlightBlock(s){return deprecated("10.7.0","highlightBlock will be removed entirely in v12.0"),deprecated("10.7.0","Please use highlightElement now."),highlightElement(s)},configure:function configure(s){s.useBR&&(deprecated("10.3.0","'useBR' will be removed entirely in v11.0"),deprecated("10.3.0","Please see https://github.com/highlightjs/highlight.js/issues/2559")),$=Re($,s)},initHighlighting,initHighlightingOnLoad:function initHighlightingOnLoad(){deprecated("10.6.0","initHighlightingOnLoad() is deprecated. Use highlightAll() instead."),Z=!0},registerLanguage:function registerLanguage(i,_){let w=null;try{w=_(s)}catch(s){if(error("Language definition for '{}' could not be registered.".replace("{}",i)),!x)throw s;error(s),w=B}w.name||(w.name=i),u[i]=w,w.rawDefinition=_.bind(null,s),w.aliases&®isterAliases(w.aliases,{languageName:i})},unregisterLanguage:function unregisterLanguage(s){delete u[s];for(const i of Object.keys(_))_[i]===s&&delete _[i]},listLanguages:function listLanguages(){return Object.keys(u)},getLanguage,registerAliases,requireLanguage:function requireLanguage(s){deprecated("10.4.0","requireLanguage will be removed entirely in v11."),deprecated("10.4.0","Please see https://github.com/highlightjs/highlight.js/pull/2844");const i=getLanguage(s);if(i)return i;throw new Error("The '{}' language is required, but not loaded.".replace("{}",s))},autoDetection,inherit:Re,addPlugin:function addPlugin(s){!function upgradePluginAPI(s){s["before:highlightBlock"]&&!s["before:highlightElement"]&&(s["before:highlightElement"]=i=>{s["before:highlightBlock"](Object.assign({block:i.el},i))}),s["after:highlightBlock"]&&!s["after:highlightElement"]&&(s["after:highlightElement"]=i=>{s["after:highlightBlock"](Object.assign({block:i.el},i))})}(s),w.push(s)},vuePlugin:BuildVuePlugin(s).VuePlugin}),s.debugMode=function(){x=!1},s.safeMode=function(){x=!0},s.versionString="10.7.3";for(const s in _e)"object"==typeof _e[s]&&i(_e[s]);return Object.assign(s,_e),s.addPlugin(U),s.addPlugin(xe),s.addPlugin(X),s}({});s.exports=$e},37952:s=>{function concat(...s){return s.map((s=>function source(s){return s?"string"==typeof s?s:s.source:null}(s))).join("")}s.exports=function bash(s){const i={},u={begin:/\$\{/,end:/\}/,contains:["self",{begin:/:-/,contains:[i]}]};Object.assign(i,{className:"variable",variants:[{begin:concat(/\$[\w\d#@][\w\d_]*/,"(?![\\w\\d])(?![$])")},u]});const _={className:"subst",begin:/\$\(/,end:/\)/,contains:[s.BACKSLASH_ESCAPE]},w={begin:/<<-?\s*(?=\w+)/,starts:{contains:[s.END_SAME_AS_BEGIN({begin:/(\w+)/,end:/(\w+)/,className:"string"})]}},x={className:"string",begin:/"/,end:/"/,contains:[s.BACKSLASH_ESCAPE,i,_]};_.contains.push(x);const j={begin:/\$\(\(/,end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},s.NUMBER_MODE,i]},P=s.SHEBANG({binary:`(${["fish","bash","zsh","sh","csh","ksh","tcsh","dash","scsh"].join("|")})`,relevance:10}),B={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[s.inherit(s.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b[a-z._-]+\b/,keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type 
typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp"},contains:[P,s.SHEBANG(),B,j,s.HASH_COMMENT_MODE,w,x,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},i]}}},56080:s=>{function concat(...s){return s.map((s=>function source(s){return s?"string"==typeof s?s:s.source:null}(s))).join("")}s.exports=function http(s){const i="HTTP/(2|1\\.[01])",u={className:"attribute",begin:concat("^",/[A-Za-z][A-Za-z0-9-]*/,"(?=\\:\\s)"),starts:{contains:[{className:"punctuation",begin:/: /,relevance:0,starts:{end:"$",relevance:0}}]}},_=[u,{begin:"\\n\\n",starts:{subLanguage:[],endsWithParent:!0}}];return{name:"HTTP",aliases:["https"],illegal:/\S/,contains:[{begin:"^(?="+i+" \\d{3})",end:/$/,contains:[{className:"meta",begin:i},{className:"number",begin:"\\b\\d{3}\\b"}],starts:{end:/\b\B/,illegal:/\S/,contains:_}},{begin:"(?=^[A-Z]+ (.*?) "+i+"$)",end:/$/,contains:[{className:"string",begin:" ",end:" ",excludeBegin:!0,excludeEnd:!0},{className:"meta",begin:i},{className:"keyword",begin:"[A-Z]+"}],starts:{end:/\b\B/,illegal:/\S/,contains:_}},s.inherit(u,{relevance:0})]}}},96800:s=>{const i="[A-Za-z$_][0-9A-Za-z$_]*",u=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],_=["true","false","null","undefined","NaN","Infinity"],w=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer","BigInt64Array","BigUint64Array","BigInt"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);function lookahead(s){return concat("(?=",s,")")}function concat(...s){return s.map((s=>function source(s){return s?"string"==typeof s?s:s.source:null}(s))).join("")}s.exports=function javascript(s){const x=i,j="<>",P="</>",B={begin:/<[A-Za-z0-9\\._:-]+/,end:/\/[A-Za-z0-9\\._:-]+>|\/>/,isTrulyOpeningTag:(s,i)=>{const u=s[0].length+s.index,_=s.input[u];"<"!==_?">"===_&&(((s,{after:i})=>{const
u="",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:s.UNDERSCORE_IDENT_RE,relevance:0},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:$,contains:fe}]}]},{begin:/,/,relevance:0},{className:"",begin:/\s/,end:/\s*/,skip:!0},{variants:[{begin:j,end:P},{begin:B.begin,"on:begin":B.isTrulyOpeningTag,end:B.end}],subLanguage:"xml",contains:[{begin:B.begin,end:B.end,skip:!0,contains:["self"]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/[{;]/,excludeEnd:!0,keywords:$,contains:["self",s.inherit(s.TITLE_MODE,{begin:x}),ye],illegal:/%/},{beginKeywords:"while if switch catch for"},{className:"function",begin:s.UNDERSCORE_IDENT_RE+"\\([^()]*(\\([^()]*(\\([^()]*\\)[^()]*)*\\)[^()]*)*\\)\\s*\\{",returnBegin:!0,contains:[ye,s.inherit(s.TITLE_MODE,{begin:x})]},{variants:[{begin:"\\."+x},{begin:"\\$"+x}],relevance:0},{className:"class",beginKeywords:"class",end:/[{;=]/,excludeEnd:!0,illegal:/[:"[\]]/,contains:[{beginKeywords:"extends"},s.UNDERSCORE_TITLE_MODE]},{begin:/\b(?=constructor)/,end:/[{;]/,excludeEnd:!0,contains:[s.inherit(s.TITLE_MODE,{begin:x}),"self",ye]},{begin:"(get|set)\\s+(?="+x+"\\()",end:/\{/,keywords:"get set",contains:[s.inherit(s.TITLE_MODE,{begin:x}),{begin:/\(\)/},ye]},{begin:/\$[(.]/}]}}},11163:s=>{s.exports=function json(s){const i={literal:"true false null"},u=[s.C_LINE_COMMENT_MODE,s.C_BLOCK_COMMENT_MODE],_=[s.QUOTE_STRING_MODE,s.C_NUMBER_MODE],w={end:",",endsWithParent:!0,excludeEnd:!0,contains:_,keywords:i},x={begin:/\{/,end:/\}/,contains:[{className:"attr",begin:/"/,end:/"/,contains:[s.BACKSLASH_ESCAPE],illegal:"\\n"},s.inherit(w,{begin:/:/})].concat(u),illegal:"\\S"},j={begin:"\\[",end:"\\]",contains:[s.inherit(w)],illegal:"\\S"};return _.push(x,j),u.forEach((function(s){_.push(s)})),{name:"JSON",contains:_,keywords:i,illegal:"\\S"}}},98792:s=>{s.exports=function powershell(s){const i={$pattern:/-?[A-z\.\-]+\b/,keyword:"if else foreach return do while until elseif begin for trap data dynamicparam end break throw param continue finally in switch exit filter try process catch hidden static parameter",built_in:"ac asnp cat cd CFS chdir clc clear clhy cli clp cls clv cnsn compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo|0 epal epcsv epsn erase etsn exsn fc fhx fl ft fw gal gbp gc gcb gci gcm gcs gdr gerr ghy gi gin gjb gl gm gmo gp gps gpv group gsn gsnp gsv gtz gu gv gwmi h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc scb select set shcm si sl sleep sls sort sp spjb spps spsv start stz sujb sv swmi tee trcm type wget where wjb 
write"},u={begin:"`[\\s\\S]",relevance:0},_={className:"variable",variants:[{begin:/\$\B/},{className:"keyword",begin:/\$this/},{begin:/\$[\w\d][\w\d_:]*/}]},w={className:"string",variants:[{begin:/"/,end:/"/},{begin:/@"/,end:/^"@/}],contains:[u,_,{className:"variable",begin:/\$[A-z]/,end:/[^A-z]/}]},x={className:"string",variants:[{begin:/'/,end:/'/},{begin:/@'/,end:/^'@/}]},j=s.inherit(s.COMMENT(null,null),{variants:[{begin:/#/,end:/$/},{begin:/<#/,end:/#>/}],contains:[{className:"doctag",variants:[{begin:/\.(synopsis|description|example|inputs|outputs|notes|link|component|role|functionality)/},{begin:/\.(parameter|forwardhelptargetname|forwardhelpcategory|remotehelprunspace|externalhelp)\s+\S+/}]}]}),P={className:"built_in",variants:[{begin:"(".concat("Add|Clear|Close|Copy|Enter|Exit|Find|Format|Get|Hide|Join|Lock|Move|New|Open|Optimize|Pop|Push|Redo|Remove|Rename|Reset|Resize|Search|Select|Set|Show|Skip|Split|Step|Switch|Undo|Unlock|Watch|Backup|Checkpoint|Compare|Compress|Convert|ConvertFrom|ConvertTo|Dismount|Edit|Expand|Export|Group|Import|Initialize|Limit|Merge|Mount|Out|Publish|Restore|Save|Sync|Unpublish|Update|Approve|Assert|Build|Complete|Confirm|Deny|Deploy|Disable|Enable|Install|Invoke|Register|Request|Restart|Resume|Start|Stop|Submit|Suspend|Uninstall|Unregister|Wait|Debug|Measure|Ping|Repair|Resolve|Test|Trace|Connect|Disconnect|Read|Receive|Send|Write|Block|Grant|Protect|Revoke|Unblock|Unprotect|Use|ForEach|Sort|Tee|Where",")+(-)[\\w\\d]+")}]},B={className:"class",beginKeywords:"class enum",end:/\s*[{]/,excludeEnd:!0,relevance:0,contains:[s.TITLE_MODE]},$={className:"function",begin:/function\s+/,end:/\s*\{|$/,excludeEnd:!0,returnBegin:!0,relevance:0,contains:[{begin:"function",relevance:0,className:"keyword"},{className:"title",begin:/\w[\w\d]*((-)[\w\d]+)*/,relevance:0},{begin:/\(/,end:/\)/,className:"params",relevance:0,contains:[_]}]},U={begin:/using\s/,end:/$/,returnBegin:!0,contains:[w,x,{className:"keyword",begin:/(using|assembly|command|module|namespace|type)/}]},Y={variants:[{className:"operator",begin:"(".concat("-and|-as|-band|-bnot|-bor|-bxor|-casesensitive|-ccontains|-ceq|-cge|-cgt|-cle|-clike|-clt|-cmatch|-cne|-cnotcontains|-cnotlike|-cnotmatch|-contains|-creplace|-csplit|-eq|-exact|-f|-file|-ge|-gt|-icontains|-ieq|-ige|-igt|-ile|-ilike|-ilt|-imatch|-in|-ine|-inotcontains|-inotlike|-inotmatch|-ireplace|-is|-isnot|-isplit|-join|-le|-like|-lt|-match|-ne|-not|-notcontains|-notin|-notlike|-notmatch|-or|-regex|-replace|-shl|-shr|-split|-wildcard|-xor",")\\b")},{className:"literal",begin:/(-)[\w\d]+/,relevance:0}]},X={className:"function",begin:/\[.*\]\s*[\w]+[ ]??\(/,end:/$/,returnBegin:!0,relevance:0,contains:[{className:"keyword",begin:"(".concat(i.keyword.toString().replace(/\s/g,"|"),")\\b"),endsParent:!0,relevance:0},s.inherit(s.TITLE_MODE,{endsParent:!0})]},Z=[X,j,u,s.NUMBER_MODE,w,x,P,_,{className:"literal",begin:/\$(null|true|false)\b/},{className:"selector-tag",begin:/@\B/,relevance:0}],ee={begin:/\[/,end:/\]/,excludeBegin:!0,excludeEnd:!0,relevance:0,contains:[].concat("self",Z,{begin:"("+["string","char","byte","int","long","bool","decimal","single","double","DateTime","xml","array","hashtable","void"].join("|")+")",className:"built_in",relevance:0},{className:"type",begin:/[\.\w\d]+/,relevance:0})};return X.contains.unshift(ee),{name:"PowerShell",aliases:["ps","ps1"],case_insensitive:!0,keywords:i,contains:Z.concat(B,$,U,Y,ee)}}},20960:s=>{function source(s){return s?"string"==typeof s?s:s.source:null}function lookahead(s){return 
concat("(?=",s,")")}function concat(...s){return s.map((s=>source(s))).join("")}function either(...s){return"("+s.map((s=>source(s))).join("|")+")"}s.exports=function xml(s){const i=concat(/[A-Z_]/,function optional(s){return concat("(",s,")?")}(/[A-Z0-9_.-]*:/),/[A-Z0-9_.-]*/),u={className:"symbol",begin:/&[a-z]+;|&#[0-9]+;|&#x[a-f0-9]+;/},_={begin:/\s/,contains:[{className:"meta-keyword",begin:/#?[a-z_][a-z1-9_-]+/,illegal:/\n/}]},w=s.inherit(_,{begin:/\(/,end:/\)/}),x=s.inherit(s.APOS_STRING_MODE,{className:"meta-string"}),j=s.inherit(s.QUOTE_STRING_MODE,{className:"meta-string"}),P={endsWithParent:!0,illegal:/`]+/}]}]}]};return{name:"HTML, XML",aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf","svg"],case_insensitive:!0,contains:[{className:"meta",begin://,relevance:10,contains:[_,j,x,w,{begin:/\[/,end:/\]/,contains:[{className:"meta",begin://,contains:[_,w,j,x]}]}]},s.COMMENT(//,{relevance:10}),{begin://,relevance:10},u,{className:"meta",begin:/<\?xml/,end:/\?>/,relevance:10},{className:"tag",begin:/)/,end:/>/,keywords:{name:"style"},contains:[P],starts:{end:/<\/style>/,returnEnd:!0,subLanguage:["css","xml"]}},{className:"tag",begin:/)/,end:/>/,keywords:{name:"script"},contains:[P],starts:{end:/<\/script>/,returnEnd:!0,subLanguage:["javascript","handlebars","xml"]}},{className:"tag",begin:/<>|<\/>/},{className:"tag",begin:concat(//,/>/,/\s/)))),end:/\/?>/,contains:[{className:"name",begin:i,relevance:0,starts:P}]},{className:"tag",begin:concat(/<\//,lookahead(concat(i,/>/))),contains:[{className:"name",begin:i,relevance:0},{begin:/>/,relevance:0,endsParent:!0}]}]}}},63396:s=>{s.exports=function yaml(s){var i="true false yes no null",u="[\\w#;/?:@&=+$,.~*'()[\\]]+",_={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[s.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:/\{\{/,end:/\}\}/},{begin:/%\{/,end:/\}/}]}]},w=s.inherit(_,{variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),x={className:"number",begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b"},j={end:",",endsWithParent:!0,excludeEnd:!0,keywords:i,relevance:0},P={begin:/\{/,end:/\}/,contains:[j],illegal:"\\n",relevance:0},B={begin:"\\[",end:"\\]",contains:[j],illegal:"\\n",relevance:0},$=[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---\\s*$",relevance:10},{className:"string",begin:"[\\|>]([1-9]?[+-])?[ ]*\\n( +)[^ ][^\\n]*\\n(\\2[^\\n]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!\\w+!"+u},{className:"type",begin:"!<"+u+">"},{className:"type",begin:"!"+u},{className:"type",begin:"!!"+u},{className:"meta",begin:"&"+s.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+s.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"-(?=[ ]|$)",relevance:0},s.HASH_COMMENT_MODE,{beginKeywords:i,keywords:{literal:i}},x,{className:"number",begin:s.C_NUMBER_RE+"\\b",relevance:0},P,B,_],U=[...$];return U.pop(),U.push(w),j.contains=U,{name:"YAML",case_insensitive:!0,aliases:["yml"],contains:$}}},24404:(s,i)=>{i.read=function(s,i,u,_,w){var 
x,j,P=8*w-_-1,B=(1<<P)-1,$=B>>1,U=-7,Y=u?w-1:0,X=u?-1:1,Z=s[i+Y];for(Y+=X,x=Z&(1<<-U)-1,Z>>=-U,U+=P;U>0;x=256*x+s[i+Y],Y+=X,U-=8);for(j=x&(1<<-U)-1,x>>=-U,U+=_;U>0;j=256*j+s[i+Y],Y+=X,U-=8);if(0===x)x=1-$;else{if(x===B)return j?NaN:1/0*(Z?-1:1);j+=Math.pow(2,_),x-=$}return(Z?-1:1)*j*Math.pow(2,x-_)},i.write=function(s,i,u,_,w,x){var j,P,B,$=8*x-w-1,U=(1<<$)-1,Y=U>>1,X=23===w?Math.pow(2,-24)-Math.pow(2,-77):0,Z=_?0:x-1,ee=_?1:-1,ae=i<0||0===i&&1/i<0?1:0;for(i=Math.abs(i),isNaN(i)||i===1/0?(P=isNaN(i)?1:0,j=U):(j=Math.floor(Math.log(i)/Math.LN2),i*(B=Math.pow(2,-j))<1&&(j--,B*=2),(i+=j+Y>=1?X/B:X*Math.pow(2,1-Y))*B>=2&&(j++,B/=2),j+Y>=U?(P=0,j=U):j+Y>=1?(P=(i*B-1)*Math.pow(2,w),j+=Y):(P=i*Math.pow(2,Y-1)*Math.pow(2,w),j=0));w>=8;s[u+Z]=255&P,Z+=ee,P/=256,w-=8);for(j=j<<w|P,$+=w;$>0;s[u+Z]=255&j,Z+=ee,j/=256,$-=8);s[u+Z-ee]|=128*ae}},50532:function(s){s.exports=function(){"use strict";var s=Array.prototype.slice;function createClass(s,i){i&&(s.prototype=Object.create(i.prototype)),s.prototype.constructor=s}function Iterable(s){return isIterable(s)?s:Seq(s)}function KeyedIterable(s){return isKeyed(s)?s:KeyedSeq(s)}function IndexedIterable(s){return isIndexed(s)?s:IndexedSeq(s)}function SetIterable(s){return isIterable(s)&&!isAssociative(s)?s:SetSeq(s)}function isIterable(s){return!(!s||!s[i])}function isKeyed(s){return!(!s||!s[u])}function isIndexed(s){return!(!s||!s[_])}function isAssociative(s){return isKeyed(s)||isIndexed(s)}function isOrdered(s){return!(!s||!s[w])}createClass(KeyedIterable,Iterable),createClass(IndexedIterable,Iterable),createClass(SetIterable,Iterable),Iterable.isIterable=isIterable,Iterable.isKeyed=isKeyed,Iterable.isIndexed=isIndexed,Iterable.isAssociative=isAssociative,Iterable.isOrdered=isOrdered,Iterable.Keyed=KeyedIterable,Iterable.Indexed=IndexedIterable,Iterable.Set=SetIterable;var i="@@__IMMUTABLE_ITERABLE__@@",u="@@__IMMUTABLE_KEYED__@@",_="@@__IMMUTABLE_INDEXED__@@",w="@@__IMMUTABLE_ORDERED__@@",x="delete",j=5,P=1<<j,B=P-1,$={},U={value:!1},Y={value:!1};function MakeRef(s){return s.value=!1,s}function SetRef(s){s&&(s.value=!0)}function OwnerID(){}function arrCopy(s,i){i=i||0;for(var u=Math.max(0,s.length-i),_=new Array(u),w=0;w<u;w++)_[w]=s[w+i];return _}function ensureSize(s){return void 0===s.size&&(s.size=s.__iterate(returnTrue)),s.size}function wrapIndex(s,i){if("number"!=typeof i){var u=i>>>0;if(""+u!==i||4294967295===u)return NaN;i=u}return i<0?ensureSize(s)+i:i}function returnTrue(){return!0}function wholeSlice(s,i,u){return(0===s||void 0!==u&&s<=-u)&&(void 0===i||void 0!==u&&i>=u)}function resolveBegin(s,i){return resolveIndex(s,i,0)}function resolveEnd(s,i){return resolveIndex(s,i,i)}function resolveIndex(s,i,u){return void 0===s?u:s<0?Math.max(0,i+s):void 0===i?s:Math.min(i,s)}var X=0,Z=1,ee=2,ae="function"==typeof Symbol&&Symbol.iterator,ie="@@iterator",le=ae||ie;function Iterator(s){this.next=s}function iteratorValue(s,i,u,_){var w=0===s?i:1===s?u:[i,u];return _?_.value=w:_={value:w,done:!1},_}function iteratorDone(){return{value:void 0,done:!0}}function hasIterator(s){return!!getIteratorFn(s)}function isIterator(s){return s&&"function"==typeof s.next}function getIterator(s){var i=getIteratorFn(s);return i&&i.call(s)}function getIteratorFn(s){var i=s&&(ae&&s[ae]||s[ie]);if("function"==typeof i)return i}function isArrayLike(s){return s&&"number"==typeof s.length}function Seq(s){return null==s?emptySequence():isIterable(s)?s.toSeq():seqFromValue(s)}function KeyedSeq(s){return null==s?emptySequence().toKeyedSeq():isIterable(s)?isKeyed(s)?s.toSeq():s.fromEntrySeq():keyedSeqFromValue(s)}function IndexedSeq(s){return null==s?emptySequence():isIterable(s)?isKeyed(s)?s.entrySeq():s.toIndexedSeq():indexedSeqFromValue(s)}function
SetSeq(s){return(null==s?emptySequence():isIterable(s)?isKeyed(s)?s.entrySeq():s:indexedSeqFromValue(s)).toSetSeq()}Iterator.prototype.toString=function(){return"[Iterator]"},Iterator.KEYS=X,Iterator.VALUES=Z,Iterator.ENTRIES=ee,Iterator.prototype.inspect=Iterator.prototype.toSource=function(){return this.toString()},Iterator.prototype[le]=function(){return this},createClass(Seq,Iterable),Seq.of=function(){return Seq(arguments)},Seq.prototype.toSeq=function(){return this},Seq.prototype.toString=function(){return this.__toString("Seq {","}")},Seq.prototype.cacheResult=function(){return!this._cache&&this.__iterateUncached&&(this._cache=this.entrySeq().toArray(),this.size=this._cache.length),this},Seq.prototype.__iterate=function(s,i){return seqIterate(this,s,i,!0)},Seq.prototype.__iterator=function(s,i){return seqIterator(this,s,i,!0)},createClass(KeyedSeq,Seq),KeyedSeq.prototype.toKeyedSeq=function(){return this},createClass(IndexedSeq,Seq),IndexedSeq.of=function(){return IndexedSeq(arguments)},IndexedSeq.prototype.toIndexedSeq=function(){return this},IndexedSeq.prototype.toString=function(){return this.__toString("Seq [","]")},IndexedSeq.prototype.__iterate=function(s,i){return seqIterate(this,s,i,!1)},IndexedSeq.prototype.__iterator=function(s,i){return seqIterator(this,s,i,!1)},createClass(SetSeq,Seq),SetSeq.of=function(){return SetSeq(arguments)},SetSeq.prototype.toSetSeq=function(){return this},Seq.isSeq=isSeq,Seq.Keyed=KeyedSeq,Seq.Set=SetSeq,Seq.Indexed=IndexedSeq;var ce,pe,de,fe="@@__IMMUTABLE_SEQ__@@";function ArraySeq(s){this._array=s,this.size=s.length}function ObjectSeq(s){var i=Object.keys(s);this._object=s,this._keys=i,this.size=i.length}function IterableSeq(s){this._iterable=s,this.size=s.length||s.size}function IteratorSeq(s){this._iterator=s,this._iteratorCache=[]}function isSeq(s){return!(!s||!s[fe])}function emptySequence(){return ce||(ce=new ArraySeq([]))}function keyedSeqFromValue(s){var i=Array.isArray(s)?new ArraySeq(s).fromEntrySeq():isIterator(s)?new IteratorSeq(s).fromEntrySeq():hasIterator(s)?new IterableSeq(s).fromEntrySeq():"object"==typeof s?new ObjectSeq(s):void 0;if(!i)throw new TypeError("Expected Array or iterable object of [k, v] entries, or keyed object: "+s);return i}function indexedSeqFromValue(s){var i=maybeIndexedSeqFromValue(s);if(!i)throw new TypeError("Expected Array or iterable object of values: "+s);return i}function seqFromValue(s){var i=maybeIndexedSeqFromValue(s)||"object"==typeof s&&new ObjectSeq(s);if(!i)throw new TypeError("Expected Array or iterable object of values, or keyed object: "+s);return i}function maybeIndexedSeqFromValue(s){return isArrayLike(s)?new ArraySeq(s):isIterator(s)?new IteratorSeq(s):hasIterator(s)?new IterableSeq(s):void 0}function seqIterate(s,i,u,_){var w=s._cache;if(w){for(var x=w.length-1,j=0;j<=x;j++){var P=w[u?x-j:j];if(!1===i(P[1],_?P[0]:j,s))return j+1}return j}return s.__iterateUncached(i,u)}function seqIterator(s,i,u,_){var w=s._cache;if(w){var x=w.length-1,j=0;return new Iterator((function(){var s=w[u?x-j:j];return j++>x?iteratorDone():iteratorValue(i,_?s[0]:j-1,s[1])}))}return s.__iteratorUncached(i,u)}function fromJS(s,i){return i?fromJSWith(i,s,"",{"":s}):fromJSDefault(s)}function fromJSWith(s,i,u,_){return Array.isArray(i)?s.call(_,u,IndexedSeq(i).map((function(u,_){return fromJSWith(s,u,_,i)}))):isPlainObj(i)?s.call(_,u,KeyedSeq(i).map((function(u,_){return fromJSWith(s,u,_,i)}))):i}function fromJSDefault(s){return 
Array.isArray(s)?IndexedSeq(s).map(fromJSDefault).toList():isPlainObj(s)?KeyedSeq(s).map(fromJSDefault).toMap():s}function isPlainObj(s){return s&&(s.constructor===Object||void 0===s.constructor)}function is(s,i){if(s===i||s!=s&&i!=i)return!0;if(!s||!i)return!1;if("function"==typeof s.valueOf&&"function"==typeof i.valueOf){if((s=s.valueOf())===(i=i.valueOf())||s!=s&&i!=i)return!0;if(!s||!i)return!1}return!("function"!=typeof s.equals||"function"!=typeof i.equals||!s.equals(i))}function deepEqual(s,i){if(s===i)return!0;if(!isIterable(i)||void 0!==s.size&&void 0!==i.size&&s.size!==i.size||void 0!==s.__hash&&void 0!==i.__hash&&s.__hash!==i.__hash||isKeyed(s)!==isKeyed(i)||isIndexed(s)!==isIndexed(i)||isOrdered(s)!==isOrdered(i))return!1;if(0===s.size&&0===i.size)return!0;var u=!isAssociative(s);if(isOrdered(s)){var _=s.entries();return i.every((function(s,i){var w=_.next().value;return w&&is(w[1],s)&&(u||is(w[0],i))}))&&_.next().done}var w=!1;if(void 0===s.size)if(void 0===i.size)"function"==typeof s.cacheResult&&s.cacheResult();else{w=!0;var x=s;s=i,i=x}var j=!0,P=i.__iterate((function(i,_){if(u?!s.has(i):w?!is(i,s.get(_,$)):!is(s.get(_,$),i))return j=!1,!1}));return j&&s.size===P}function Repeat(s,i){if(!(this instanceof Repeat))return new Repeat(s,i);if(this._value=s,this.size=void 0===i?1/0:Math.max(0,i),0===this.size){if(pe)return pe;pe=this}}function invariant(s,i){if(!s)throw new Error(i)}function Range(s,i,u){if(!(this instanceof Range))return new Range(s,i,u);if(invariant(0!==u,"Cannot step a Range by 0"),s=s||0,void 0===i&&(i=1/0),u=void 0===u?1:Math.abs(u),i_?iteratorDone():iteratorValue(s,w,u[i?_-w++:w++])}))},createClass(ObjectSeq,KeyedSeq),ObjectSeq.prototype.get=function(s,i){return void 0===i||this.has(s)?this._object[s]:i},ObjectSeq.prototype.has=function(s){return this._object.hasOwnProperty(s)},ObjectSeq.prototype.__iterate=function(s,i){for(var u=this._object,_=this._keys,w=_.length-1,x=0;x<=w;x++){var j=_[i?w-x:x];if(!1===s(u[j],j,this))return x+1}return x},ObjectSeq.prototype.__iterator=function(s,i){var u=this._object,_=this._keys,w=_.length-1,x=0;return new Iterator((function(){var j=_[i?w-x:x];return x++>w?iteratorDone():iteratorValue(s,j,u[j])}))},ObjectSeq.prototype[w]=!0,createClass(IterableSeq,IndexedSeq),IterableSeq.prototype.__iterateUncached=function(s,i){if(i)return this.cacheResult().__iterate(s,i);var u=getIterator(this._iterable),_=0;if(isIterator(u))for(var w;!(w=u.next()).done&&!1!==s(w.value,_++,this););return _},IterableSeq.prototype.__iteratorUncached=function(s,i){if(i)return this.cacheResult().__iterator(s,i);var u=getIterator(this._iterable);if(!isIterator(u))return new Iterator(iteratorDone);var _=0;return new Iterator((function(){var i=u.next();return i.done?i:iteratorValue(s,_++,i.value)}))},createClass(IteratorSeq,IndexedSeq),IteratorSeq.prototype.__iterateUncached=function(s,i){if(i)return this.cacheResult().__iterate(s,i);for(var u,_=this._iterator,w=this._iteratorCache,x=0;x=_.length){var i=u.next();if(i.done)return i;_[w]=i.value}return iteratorValue(s,w,_[w++])}))},createClass(Repeat,IndexedSeq),Repeat.prototype.toString=function(){return 0===this.size?"Repeat []":"Repeat [ "+this._value+" "+this.size+" times ]"},Repeat.prototype.get=function(s,i){return this.has(s)?this._value:i},Repeat.prototype.includes=function(s){return is(this._value,s)},Repeat.prototype.slice=function(s,i){var u=this.size;return wholeSlice(s,i,u)?this:new Repeat(this._value,resolveEnd(i,u)-resolveBegin(s,u))},Repeat.prototype.reverse=function(){return 
this},Repeat.prototype.indexOf=function(s){return is(this._value,s)?0:-1},Repeat.prototype.lastIndexOf=function(s){return is(this._value,s)?this.size:-1},Repeat.prototype.__iterate=function(s,i){for(var u=0;u=0&&i=0&&uu?iteratorDone():iteratorValue(s,x++,j)}))},Range.prototype.equals=function(s){return s instanceof Range?this._start===s._start&&this._end===s._end&&this._step===s._step:deepEqual(this,s)},createClass(Collection,Iterable),createClass(KeyedCollection,Collection),createClass(IndexedCollection,Collection),createClass(SetCollection,Collection),Collection.Keyed=KeyedCollection,Collection.Indexed=IndexedCollection,Collection.Set=SetCollection;var ye="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function imul(s,i){var u=65535&(s|=0),_=65535&(i|=0);return u*_+((s>>>16)*_+u*(i>>>16)<<16>>>0)|0};function smi(s){return s>>>1&1073741824|3221225471&s}function hash(s){if(!1===s||null==s)return 0;if("function"==typeof s.valueOf&&(!1===(s=s.valueOf())||null==s))return 0;if(!0===s)return 1;var i=typeof s;if("number"===i){if(s!=s||s===1/0)return 0;var u=0|s;for(u!==s&&(u^=4294967295*s);s>4294967295;)u^=s/=4294967295;return smi(u)}if("string"===i)return s.length>Te?cachedHashString(s):hashString(s);if("function"==typeof s.hashCode)return s.hashCode();if("object"===i)return hashJSObj(s);if("function"==typeof s.toString)return hashString(s.toString());throw new Error("Value type "+i+" cannot be hashed.")}function cachedHashString(s){var i=$e[s];return void 0===i&&(i=hashString(s),qe===Re&&(qe=0,$e={}),qe++,$e[s]=i),i}function hashString(s){for(var i=0,u=0;u0)switch(s.nodeType){case 1:return s.uniqueID;case 9:return s.documentElement&&s.documentElement.uniqueID}}var we,Se="function"==typeof WeakMap;Se&&(we=new WeakMap);var xe=0,Pe="__immutablehash__";"function"==typeof Symbol&&(Pe=Symbol(Pe));var Te=16,Re=255,qe=0,$e={};function assertNotInfinite(s){invariant(s!==1/0,"Cannot perform this action with an infinite size.")}function Map(s){return null==s?emptyMap():isMap(s)&&!isOrdered(s)?s:emptyMap().withMutations((function(i){var u=KeyedIterable(s);assertNotInfinite(u.size),u.forEach((function(s,u){return i.set(u,s)}))}))}function isMap(s){return!(!s||!s[We])}createClass(Map,KeyedCollection),Map.of=function(){var i=s.call(arguments,0);return emptyMap().withMutations((function(s){for(var u=0;u<i.length;u+=2){if(u+1>=i.length)throw new Error("Missing value for key: "+i[u]);s.set(i[u],i[u+1])}}))},Map.prototype.toString=function(){return this.__toString("Map {","}")},Map.prototype.get=function(s,i){return this._root?this._root.get(0,void 0,s,i):i},Map.prototype.set=function(s,i){return updateMap(this,s,i)},Map.prototype.setIn=function(s,i){return this.updateIn(s,$,(function(){return i}))},Map.prototype.remove=function(s){return updateMap(this,s,$)},Map.prototype.deleteIn=function(s){return this.updateIn(s,(function(){return $}))},Map.prototype.update=function(s,i,u){return 1===arguments.length?s(this):this.updateIn([s],i,u)},Map.prototype.updateIn=function(s,i,u){u||(u=i,i=void 0);var _=updateInDeepMap(this,forceIterator(s),i,u);return _===$?void 0:_},Map.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):emptyMap()},Map.prototype.merge=function(){return mergeIntoMapWith(this,void 0,arguments)},Map.prototype.mergeWith=function(i){return mergeIntoMapWith(this,i,s.call(arguments,1))},Map.prototype.mergeIn=function(i){var u=s.call(arguments,1);return this.updateIn(i,emptyMap(),(function(s){return"function"==typeof
s.merge?s.merge.apply(s,u):u[u.length-1]}))},Map.prototype.mergeDeep=function(){return mergeIntoMapWith(this,deepMerger,arguments)},Map.prototype.mergeDeepWith=function(i){var u=s.call(arguments,1);return mergeIntoMapWith(this,deepMergerWith(i),u)},Map.prototype.mergeDeepIn=function(i){var u=s.call(arguments,1);return this.updateIn(i,emptyMap(),(function(s){return"function"==typeof s.mergeDeep?s.mergeDeep.apply(s,u):u[u.length-1]}))},Map.prototype.sort=function(s){return OrderedMap(sortFactory(this,s))},Map.prototype.sortBy=function(s,i){return OrderedMap(sortFactory(this,i,s))},Map.prototype.withMutations=function(s){var i=this.asMutable();return s(i),i.wasAltered()?i.__ensureOwner(this.__ownerID):this},Map.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new OwnerID)},Map.prototype.asImmutable=function(){return this.__ensureOwner()},Map.prototype.wasAltered=function(){return this.__altered},Map.prototype.__iterator=function(s,i){return new MapIterator(this,s,i)},Map.prototype.__iterate=function(s,i){var u=this,_=0;return this._root&&this._root.iterate((function(i){return _++,s(i[1],i[0],u)}),i),_},Map.prototype.__ensureOwner=function(s){return s===this.__ownerID?this:s?makeMap(this.size,this._root,s,this.__hash):(this.__ownerID=s,this.__altered=!1,this)},Map.isMap=isMap;var ze,We="@@__IMMUTABLE_MAP__@@",He=Map.prototype;function ArrayMapNode(s,i){this.ownerID=s,this.entries=i}function BitmapIndexedNode(s,i,u){this.ownerID=s,this.bitmap=i,this.nodes=u}function HashArrayMapNode(s,i,u){this.ownerID=s,this.count=i,this.nodes=u}function HashCollisionNode(s,i,u){this.ownerID=s,this.keyHash=i,this.entries=u}function ValueNode(s,i,u){this.ownerID=s,this.keyHash=i,this.entry=u}function MapIterator(s,i,u){this._type=i,this._reverse=u,this._stack=s._root&&mapIteratorFrame(s._root)}function mapIteratorValue(s,i){return iteratorValue(s,i[0],i[1])}function mapIteratorFrame(s,i){return{node:s,index:0,__prev:i}}function makeMap(s,i,u,_){var w=Object.create(He);return w.size=s,w._root=i,w.__ownerID=u,w.__hash=_,w.__altered=!1,w}function emptyMap(){return ze||(ze=makeMap(0))}function updateMap(s,i,u){var _,w;if(s._root){var x=MakeRef(U),j=MakeRef(Y);if(_=updateNode(s._root,s.__ownerID,0,void 0,i,u,x,j),!j.value)return s;w=s.size+(x.value?u===$?-1:1:0)}else{if(u===$)return s;w=1,_=new ArrayMapNode(s.__ownerID,[[i,u]])}return s.__ownerID?(s.size=w,s._root=_,s.__hash=void 0,s.__altered=!0,s):_?makeMap(w,_):emptyMap()}function updateNode(s,i,u,_,w,x,j,P){return s?s.update(i,u,_,w,x,j,P):x===$?s:(SetRef(P),SetRef(j),new ValueNode(i,_,[w,x]))}function isLeafNode(s){return s.constructor===ValueNode||s.constructor===HashCollisionNode}function mergeIntoNode(s,i,u,_,w){if(s.keyHash===_)return new HashCollisionNode(i,_,[s.entry,w]);var x,P=(0===u?s.keyHash:s.keyHash>>>u)&B,$=(0===u?_:_>>>u)&B;return new BitmapIndexedNode(i,1<>>=1)j[B]=1&u?i[x++]:void 0;return j[_]=w,new HashArrayMapNode(s,x+1,j)}function mergeIntoMapWith(s,i,u){for(var _=[],w=0;w>1&1431655765))+(s>>2&858993459))+(s>>4)&252645135,s+=s>>8,127&(s+=s>>16)}function setIn(s,i,u,_){var w=_?s:arrCopy(s);return w[i]=u,w}function spliceIn(s,i,u,_){var w=s.length+1;if(_&&i+1===w)return s[i]=u,s;for(var x=new Array(w),j=0,P=0;P=Ye)return createNodes(s,B,_,w);var Z=s&&s===this.ownerID,ee=Z?B:arrCopy(B);return X?P?U===Y-1?ee.pop():ee[U]=ee.pop():ee[U]=[_,w]:ee.push([_,w]),Z?(this.entries=ee,this):new ArrayMapNode(s,ee)}},BitmapIndexedNode.prototype.get=function(s,i,u,_){void 0===i&&(i=hash(u));var 
w=1<<((0===s?i:i>>>s)&B),x=this.bitmap;return 0==(x&w)?_:this.nodes[popCount(x&w-1)].get(s+j,i,u,_)},BitmapIndexedNode.prototype.update=function(s,i,u,_,w,x,P){void 0===u&&(u=hash(_));var U=(0===i?u:u>>>i)&B,Y=1<<U,X=this.bitmap,Z=0!=(X&Y);if(!Z&&w===$)return this;var ee=popCount(X&Y-1),ae=this.nodes,ie=Z?ae[ee]:void 0,le=updateNode(ie,s,i+j,u,_,w,x,P);if(le===ie)return this;if(!Z&&le&&ae.length>=Qe)return expandNodes(s,ae,X,U,le);if(Z&&!le&&2===ae.length&&isLeafNode(ae[1^ee]))return ae[1^ee];if(Z&&le&&1===ae.length&&isLeafNode(le))return le;var ce=s&&s===this.ownerID,pe=Z?le?X:X^Y:X|Y,de=Z?le?setIn(ae,ee,le,ce):spliceOut(ae,ee,ce):spliceIn(ae,ee,le,ce);return ce?(this.bitmap=pe,this.nodes=de,this):new BitmapIndexedNode(s,pe,de)},HashArrayMapNode.prototype.get=function(s,i,u,_){void 0===i&&(i=hash(u));var w=(0===s?i:i>>>s)&B,x=this.nodes[w];return x?x.get(s+j,i,u,_):_},HashArrayMapNode.prototype.update=function(s,i,u,_,w,x,P){void 0===u&&(u=hash(_));var U=(0===i?u:u>>>i)&B,Y=w===$,X=this.nodes,Z=X[U];if(Y&&!Z)return this;var ee=updateNode(Z,s,i+j,u,_,w,x,P);if(ee===Z)return this;var ae=this.count;if(Z){if(!ee&&--ae0&&_=0&&s>>i&B;if(_>=this.array.length)return new VNode([],s);var w,x=0===_;if(i>0){var P=this.array[_];if((w=P&&P.removeBefore(s,i-j,u))===P&&x)return this}if(x&&!w)return this;var $=editableVNode(this,s);if(!x)for(var U=0;U<_;U++)$.array[U]=void 0;return w&&($.array[_]=w),$},VNode.prototype.removeAfter=function(s,i,u){if(u===(i?1<<i:0)||0===this.array.length)return this;var _,w=u-1>>>i&B;if(w>=this.array.length)return this;if(i>0){var x=this.array[w];if((_=x&&x.removeAfter(s,i-j,u))===x&&w===this.array.length-1)return this}var P=editableVNode(this,s);return P.array.splice(w+1),_&&(P.array[w]=_),P};var rt,nt,ot={};function iterateList(s,i){var u=s._origin,_=s._capacity,w=getTailOffset(_),x=s._tail;return iterateNodeOrLeaf(s._root,s._level,0);function iterateNodeOrLeaf(s,i,u){return 0===i?iterateLeaf(s,u):iterateNode(s,i,u)}function iterateLeaf(s,j){var B=j===w?x&&x.array:s&&s.array,$=j>u?0:u-j,U=_-j;return U>P&&(U=P),function(){if($===U)return ot;var s=i?--U:$++;return B&&B[s]}}function iterateNode(s,w,x){var B,$=s&&s.array,U=x>u?0:u-x>>w,Y=1+(_-x>>w);return Y>P&&(Y=P),function(){for(;;){if(B){var s=B();if(s!==ot)return s;B=null}if(U===Y)return ot;var u=i?--Y:U++;B=iterateNodeOrLeaf($&&$[u],w-j,x+(u<=s.size||i<0)return s.withMutations((function(s){i<0?setListBounds(s,i).set(0,u):setListBounds(s,0,i+1).set(i,u)}));i+=s._origin;var _=s._tail,w=s._root,x=MakeRef(Y);return i>=getTailOffset(s._capacity)?_=updateVNode(_,s.__ownerID,0,i,u,x):w=updateVNode(w,s.__ownerID,s._level,i,u,x),x.value?s.__ownerID?(s._root=w,s._tail=_,s.__hash=void 0,s.__altered=!0,s):makeList(s._origin,s._capacity,s._level,w,_):s}function updateVNode(s,i,u,_,w,x){var P,$=_>>>u&B,U=s&&$<s.array.length;if(!U&&void 0===w)return s;if(u>0){var Y=s&&s.array[$],X=updateVNode(Y,i,u-j,_,w,x);return X===Y?s:((P=editableVNode(s,i)).array[$]=X,P)}return U&&s.array[$]===w?s:(SetRef(x),P=editableVNode(s,i),void 0===w&&$===P.array.length-1?P.array.pop():P.array[$]=w,P)}function editableVNode(s,i){return i&&s&&i===s.ownerID?s:new VNode(s?s.array.slice():[],i)}function listNodeFor(s,i){if(i>=getTailOffset(s._capacity))return s._tail;if(i<1<<s._level+j){for(var u=s._root,_=s._level;u&&_>0;)u=u.array[i>>>_&B],_-=j;return u}}function setListBounds(s,i,u){void 0!==i&&(i|=0),void 0!==u&&(u|=0);var _=s.__ownerID||new OwnerID,w=s._origin,x=s._capacity,P=w+i,$=void 0===u?x:u<0?x+u:w+u;if(P===w&&$===x)return s;if(P>=$)return s.clear();for(var U=s._level,Y=s._root,X=0;P+X<0;)Y=new VNode(Y&&Y.array.length?[void 0,Y]:[],_),X+=1<<(U+=j);X&&(P+=X,w+=X,$+=X,x+=X);for(var Z=getTailOffset(x),ee=getTailOffset($);ee>=1<<U+j;)Y=new VNode(Y&&Y.array.length?[Y]:[],_),U+=j;var ae=s._tail,ie=ee<Z?listNodeFor(s,$-1):ee>Z?new VNode([],_):ae;if(ae&&ee>Z&&P<x&&ae.array.length){for(var le=Y=editableVNode(Y,_),ce=U;ce>j;ce-=j){var
pe=Z>>>ce&B;le=le.array[pe]=editableVNode(le.array[pe],_)}le.array[Z>>>j&B]=ae}if($=ee)P-=ee,$-=ee,U=j,Y=null,ie=ie&&ie.removeBefore(_,0,P);else if(P>w||ee>>U&B;if(de!==ee>>>U&B)break;de&&(X+=(1<w&&(Y=Y.removeBefore(_,U,P-X)),Y&&eew&&(w=P.size),isIterable(j)||(P=P.map((function(s){return fromJS(s)}))),_.push(P)}return w>s.size&&(s=s.setSize(w)),mergeIntoCollectionWith(s,i,_)}function getTailOffset(s){return s>>j<=P&&j.size>=2*x.size?(_=(w=j.filter((function(s,i){return void 0!==s&&B!==i}))).toKeyedSeq().map((function(s){return s[0]})).flip().toMap(),s.__ownerID&&(_.__ownerID=w.__ownerID=s.__ownerID)):(_=x.remove(i),w=B===j.size-1?j.pop():j.set(B,void 0))}else if(U){if(u===j.get(B)[1])return s;_=x,w=j.set(B,[i,u])}else _=x.set(i,j.size),w=j.set(j.size,[i,u]);return s.__ownerID?(s.size=_.size,s._map=_,s._list=w,s.__hash=void 0,s):makeOrderedMap(_,w)}function ToKeyedSequence(s,i){this._iter=s,this._useKeys=i,this.size=s.size}function ToIndexedSequence(s){this._iter=s,this.size=s.size}function ToSetSequence(s){this._iter=s,this.size=s.size}function FromEntriesSequence(s){this._iter=s,this.size=s.size}function flipFactory(s){var i=makeSequence(s);return i._iter=s,i.size=s.size,i.flip=function(){return s},i.reverse=function(){var i=s.reverse.apply(this);return i.flip=function(){return s.reverse()},i},i.has=function(i){return s.includes(i)},i.includes=function(i){return s.has(i)},i.cacheResult=cacheResultThrough,i.__iterateUncached=function(i,u){var _=this;return s.__iterate((function(s,u){return!1!==i(u,s,_)}),u)},i.__iteratorUncached=function(i,u){if(i===ee){var _=s.__iterator(i,u);return new Iterator((function(){var s=_.next();if(!s.done){var i=s.value[0];s.value[0]=s.value[1],s.value[1]=i}return s}))}return s.__iterator(i===Z?X:Z,u)},i}function mapFactory(s,i,u){var _=makeSequence(s);return _.size=s.size,_.has=function(i){return s.has(i)},_.get=function(_,w){var x=s.get(_,$);return x===$?w:i.call(u,x,_,s)},_.__iterateUncached=function(_,w){var x=this;return s.__iterate((function(s,w,j){return!1!==_(i.call(u,s,w,j),w,x)}),w)},_.__iteratorUncached=function(_,w){var x=s.__iterator(ee,w);return new Iterator((function(){var w=x.next();if(w.done)return w;var j=w.value,P=j[0];return iteratorValue(_,P,i.call(u,j[1],P,s),w)}))},_}function reverseFactory(s,i){var u=makeSequence(s);return u._iter=s,u.size=s.size,u.reverse=function(){return s},s.flip&&(u.flip=function(){var i=flipFactory(s);return i.reverse=function(){return s.flip()},i}),u.get=function(u,_){return s.get(i?u:-1-u,_)},u.has=function(u){return s.has(i?u:-1-u)},u.includes=function(i){return s.includes(i)},u.cacheResult=cacheResultThrough,u.__iterate=function(i,u){var _=this;return s.__iterate((function(s,u){return i(s,u,_)}),!u)},u.__iterator=function(i,u){return s.__iterator(i,!u)},u}function filterFactory(s,i,u,_){var w=makeSequence(s);return _&&(w.has=function(_){var w=s.get(_,$);return w!==$&&!!i.call(u,w,_,s)},w.get=function(_,w){var x=s.get(_,$);return x!==$&&i.call(u,x,_,s)?x:w}),w.__iterateUncached=function(w,x){var j=this,P=0;return s.__iterate((function(s,x,B){if(i.call(u,s,x,B))return P++,w(s,_?x:P-1,j)}),x),P},w.__iteratorUncached=function(w,x){var j=s.__iterator(ee,x),P=0;return new Iterator((function(){for(;;){var x=j.next();if(x.done)return x;var B=x.value,$=B[0],U=B[1];if(i.call(u,U,$,s))return iteratorValue(w,_?$:P++,U,x)}}))},w}function countByFactory(s,i,u){var _=Map().asMutable();return s.__iterate((function(w,x){_.update(i.call(u,w,x,s),0,(function(s){return s+1}))})),_.asImmutable()}function groupByFactory(s,i,u){var 
_=isKeyed(s),w=(isOrdered(s)?OrderedMap():Map()).asMutable();s.__iterate((function(x,j){w.update(i.call(u,x,j,s),(function(s){return(s=s||[]).push(_?[j,x]:x),s}))}));var x=iterableClass(s);return w.map((function(i){return reify(s,x(i))}))}function sliceFactory(s,i,u,_){var w=s.size;if(void 0!==i&&(i|=0),void 0!==u&&(u===1/0?u=w:u|=0),wholeSlice(i,u,w))return s;var x=resolveBegin(i,w),j=resolveEnd(u,w);if(x!=x||j!=j)return sliceFactory(s.toSeq().cacheResult(),i,u,_);var P,B=j-x;B==B&&(P=B<0?0:B);var $=makeSequence(s);return $.size=0===P?P:s.size&&P||void 0,!_&&isSeq(s)&&P>=0&&($.get=function(i,u){return(i=wrapIndex(this,i))>=0&&iP)return iteratorDone();var s=w.next();return _||i===Z?s:iteratorValue(i,B-1,i===X?void 0:s.value[1],s)}))},$}function takeWhileFactory(s,i,u){var _=makeSequence(s);return _.__iterateUncached=function(_,w){var x=this;if(w)return this.cacheResult().__iterate(_,w);var j=0;return s.__iterate((function(s,w,P){return i.call(u,s,w,P)&&++j&&_(s,w,x)})),j},_.__iteratorUncached=function(_,w){var x=this;if(w)return this.cacheResult().__iterator(_,w);var j=s.__iterator(ee,w),P=!0;return new Iterator((function(){if(!P)return iteratorDone();var s=j.next();if(s.done)return s;var w=s.value,B=w[0],$=w[1];return i.call(u,$,B,x)?_===ee?s:iteratorValue(_,B,$,s):(P=!1,iteratorDone())}))},_}function skipWhileFactory(s,i,u,_){var w=makeSequence(s);return w.__iterateUncached=function(w,x){var j=this;if(x)return this.cacheResult().__iterate(w,x);var P=!0,B=0;return s.__iterate((function(s,x,$){if(!P||!(P=i.call(u,s,x,$)))return B++,w(s,_?x:B-1,j)})),B},w.__iteratorUncached=function(w,x){var j=this;if(x)return this.cacheResult().__iterator(w,x);var P=s.__iterator(ee,x),B=!0,$=0;return new Iterator((function(){var s,x,U;do{if((s=P.next()).done)return _||w===Z?s:iteratorValue(w,$++,w===X?void 0:s.value[1],s);var Y=s.value;x=Y[0],U=Y[1],B&&(B=i.call(u,U,x,j))}while(B);return w===ee?s:iteratorValue(w,x,U,s)}))},w}function concatFactory(s,i){var u=isKeyed(s),_=[s].concat(i).map((function(s){return isIterable(s)?u&&(s=KeyedIterable(s)):s=u?keyedSeqFromValue(s):indexedSeqFromValue(Array.isArray(s)?s:[s]),s})).filter((function(s){return 0!==s.size}));if(0===_.length)return s;if(1===_.length){var w=_[0];if(w===s||u&&isKeyed(w)||isIndexed(s)&&isIndexed(w))return w}var x=new ArraySeq(_);return u?x=x.toKeyedSeq():isIndexed(s)||(x=x.toSetSeq()),(x=x.flatten(!0)).size=_.reduce((function(s,i){if(void 0!==s){var u=i.size;if(void 0!==u)return s+u}}),0),x}function flattenFactory(s,i,u){var _=makeSequence(s);return _.__iterateUncached=function(_,w){var x=0,j=!1;function flatDeep(s,P){var B=this;s.__iterate((function(s,w){return(!i||P0}function zipWithFactory(s,i,u){var _=makeSequence(s);return _.size=new ArraySeq(u).map((function(s){return s.size})).min(),_.__iterate=function(s,i){for(var u,_=this.__iterator(Z,i),w=0;!(u=_.next()).done&&!1!==s(u.value,w++,this););return w},_.__iteratorUncached=function(s,_){var w=u.map((function(s){return s=Iterable(s),getIterator(_?s.reverse():s)})),x=0,j=!1;return new Iterator((function(){var u;return j||(u=w.map((function(s){return s.next()})),j=u.some((function(s){return s.done}))),j?iteratorDone():iteratorValue(s,x++,i.apply(null,u.map((function(s){return s.value}))))}))},_}function reify(s,i){return isSeq(s)?i:s.constructor(i)}function validateEntry(s){if(s!==Object(s))throw new TypeError("Expected [K, V] tuple: "+s)}function resolveSize(s){return assertNotInfinite(s.size),ensureSize(s)}function iterableClass(s){return 
isKeyed(s)?KeyedIterable:isIndexed(s)?IndexedIterable:SetIterable}function makeSequence(s){return Object.create((isKeyed(s)?KeyedSeq:isIndexed(s)?IndexedSeq:SetSeq).prototype)}function cacheResultThrough(){return this._iter.cacheResult?(this._iter.cacheResult(),this.size=this._iter.size,this):Seq.prototype.cacheResult.call(this)}function defaultComparator(s,i){return s>i?1:s=0;u--)i={value:arguments[u],next:i};return this.__ownerID?(this.size=s,this._head=i,this.__hash=void 0,this.__altered=!0,this):makeStack(s,i)},Stack.prototype.pushAll=function(s){if(0===(s=IndexedIterable(s)).size)return this;assertNotInfinite(s.size);var i=this.size,u=this._head;return s.reverse().forEach((function(s){i++,u={value:s,next:u}})),this.__ownerID?(this.size=i,this._head=u,this.__hash=void 0,this.__altered=!0,this):makeStack(i,u)},Stack.prototype.pop=function(){return this.slice(1)},Stack.prototype.unshift=function(){return this.push.apply(this,arguments)},Stack.prototype.unshiftAll=function(s){return this.pushAll(s)},Stack.prototype.shift=function(){return this.pop.apply(this,arguments)},Stack.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._head=void 0,this.__hash=void 0,this.__altered=!0,this):emptyStack()},Stack.prototype.slice=function(s,i){if(wholeSlice(s,i,this.size))return this;var u=resolveBegin(s,this.size);if(resolveEnd(i,this.size)!==this.size)return IndexedCollection.prototype.slice.call(this,s,i);for(var _=this.size-u,w=this._head;u--;)w=w.next;return this.__ownerID?(this.size=_,this._head=w,this.__hash=void 0,this.__altered=!0,this):makeStack(_,w)},Stack.prototype.__ensureOwner=function(s){return s===this.__ownerID?this:s?makeStack(this.size,this._head,s,this.__hash):(this.__ownerID=s,this.__altered=!1,this)},Stack.prototype.__iterate=function(s,i){if(i)return this.reverse().__iterate(s);for(var u=0,_=this._head;_&&!1!==s(_.value,u++,this);)_=_.next;return u},Stack.prototype.__iterator=function(s,i){if(i)return this.reverse().__iterator(s);var u=0,_=this._head;return new Iterator((function(){if(_){var i=_.value;return _=_.next,iteratorValue(s,u++,i)}return iteratorDone()}))},Stack.isStack=isStack;var pt,ht="@@__IMMUTABLE_STACK__@@",dt=Stack.prototype;function makeStack(s,i,u,_){var w=Object.create(dt);return w.size=s,w._head=i,w.__ownerID=u,w.__hash=_,w.__altered=!1,w}function emptyStack(){return pt||(pt=makeStack(0))}function mixin(s,i){var keyCopier=function(u){s.prototype[u]=i[u]};return Object.keys(i).forEach(keyCopier),Object.getOwnPropertySymbols&&Object.getOwnPropertySymbols(i).forEach(keyCopier),s}dt[ht]=!0,dt.withMutations=He.withMutations,dt.asMutable=He.asMutable,dt.asImmutable=He.asImmutable,dt.wasAltered=He.wasAltered,Iterable.Iterator=Iterator,mixin(Iterable,{toArray:function(){assertNotInfinite(this.size);var s=new Array(this.size||0);return this.valueSeq().__iterate((function(i,u){s[u]=i})),s},toIndexedSeq:function(){return new ToIndexedSequence(this)},toJS:function(){return this.toSeq().map((function(s){return s&&"function"==typeof s.toJS?s.toJS():s})).__toJS()},toJSON:function(){return this.toSeq().map((function(s){return s&&"function"==typeof s.toJSON?s.toJSON():s})).__toJS()},toKeyedSeq:function(){return new ToKeyedSequence(this,!0)},toMap:function(){return Map(this.toKeyedSeq())},toObject:function(){assertNotInfinite(this.size);var s={};return this.__iterate((function(i,u){s[u]=i})),s},toOrderedMap:function(){return OrderedMap(this.toKeyedSeq())},toOrderedSet:function(){return 
OrderedSet(isKeyed(this)?this.valueSeq():this)},toSet:function(){return Set(isKeyed(this)?this.valueSeq():this)},toSetSeq:function(){return new ToSetSequence(this)},toSeq:function(){return isIndexed(this)?this.toIndexedSeq():isKeyed(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return Stack(isKeyed(this)?this.valueSeq():this)},toList:function(){return List(isKeyed(this)?this.valueSeq():this)},toString:function(){return"[Iterable]"},__toString:function(s,i){return 0===this.size?s+i:s+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+i},concat:function(){return reify(this,concatFactory(this,s.call(arguments,0)))},includes:function(s){return this.some((function(i){return is(i,s)}))},entries:function(){return this.__iterator(ee)},every:function(s,i){assertNotInfinite(this.size);var u=!0;return this.__iterate((function(_,w,x){if(!s.call(i,_,w,x))return u=!1,!1})),u},filter:function(s,i){return reify(this,filterFactory(this,s,i,!0))},find:function(s,i,u){var _=this.findEntry(s,i);return _?_[1]:u},forEach:function(s,i){return assertNotInfinite(this.size),this.__iterate(i?s.bind(i):s)},join:function(s){assertNotInfinite(this.size),s=void 0!==s?""+s:",";var i="",u=!0;return this.__iterate((function(_){u?u=!1:i+=s,i+=null!=_?_.toString():""})),i},keys:function(){return this.__iterator(X)},map:function(s,i){return reify(this,mapFactory(this,s,i))},reduce:function(s,i,u){var _,w;return assertNotInfinite(this.size),arguments.length<2?w=!0:_=i,this.__iterate((function(i,x,j){w?(w=!1,_=i):_=s.call(u,_,i,x,j)})),_},reduceRight:function(s,i,u){var _=this.toKeyedSeq().reverse();return _.reduce.apply(_,arguments)},reverse:function(){return reify(this,reverseFactory(this,!0))},slice:function(s,i){return reify(this,sliceFactory(this,s,i,!0))},some:function(s,i){return!this.every(not(s),i)},sort:function(s){return reify(this,sortFactory(this,s))},values:function(){return this.__iterator(Z)},butLast:function(){return this.slice(0,-1)},isEmpty:function(){return void 0!==this.size?0===this.size:!this.some((function(){return!0}))},count:function(s,i){return ensureSize(s?this.toSeq().filter(s,i):this)},countBy:function(s,i){return countByFactory(this,s,i)},equals:function(s){return deepEqual(this,s)},entrySeq:function(){var s=this;if(s._cache)return new ArraySeq(s._cache);var i=s.toSeq().map(entryMapper).toIndexedSeq();return i.fromEntrySeq=function(){return s.toSeq()},i},filterNot:function(s,i){return this.filter(not(s),i)},findEntry:function(s,i,u){var _=u;return this.__iterate((function(u,w,x){if(s.call(i,u,w,x))return _=[w,u],!1})),_},findKey:function(s,i){var u=this.findEntry(s,i);return u&&u[0]},findLast:function(s,i,u){return this.toKeyedSeq().reverse().find(s,i,u)},findLastEntry:function(s,i,u){return this.toKeyedSeq().reverse().findEntry(s,i,u)},findLastKey:function(s,i){return this.toKeyedSeq().reverse().findKey(s,i)},first:function(){return this.find(returnTrue)},flatMap:function(s,i){return reify(this,flatMapFactory(this,s,i))},flatten:function(s){return reify(this,flattenFactory(this,s,!0))},fromEntrySeq:function(){return new FromEntriesSequence(this)},get:function(s,i){return this.find((function(i,u){return is(u,s)}),void 0,i)},getIn:function(s,i){for(var u,_=this,w=forceIterator(s);!(u=w.next()).done;){var x=u.value;if((_=_&&_.get?_.get(x,$):$)===$)return i}return _},groupBy:function(s,i){return groupByFactory(this,s,i)},has:function(s){return this.get(s,$)!==$},hasIn:function(s){return this.getIn(s,$)!==$},isSubset:function(s){return s="function"==typeof 
s.includes?s:Iterable(s),this.every((function(i){return s.includes(i)}))},isSuperset:function(s){return(s="function"==typeof s.isSubset?s:Iterable(s)).isSubset(this)},keyOf:function(s){return this.findKey((function(i){return is(i,s)}))},keySeq:function(){return this.toSeq().map(keyMapper).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},lastKeyOf:function(s){return this.toKeyedSeq().reverse().keyOf(s)},max:function(s){return maxFactory(this,s)},maxBy:function(s,i){return maxFactory(this,i,s)},min:function(s){return maxFactory(this,s?neg(s):defaultNegComparator)},minBy:function(s,i){return maxFactory(this,i?neg(i):defaultNegComparator,s)},rest:function(){return this.slice(1)},skip:function(s){return this.slice(Math.max(0,s))},skipLast:function(s){return reify(this,this.toSeq().reverse().skip(s).reverse())},skipWhile:function(s,i){return reify(this,skipWhileFactory(this,s,i,!0))},skipUntil:function(s,i){return this.skipWhile(not(s),i)},sortBy:function(s,i){return reify(this,sortFactory(this,i,s))},take:function(s){return this.slice(0,Math.max(0,s))},takeLast:function(s){return reify(this,this.toSeq().reverse().take(s).reverse())},takeWhile:function(s,i){return reify(this,takeWhileFactory(this,s,i))},takeUntil:function(s,i){return this.takeWhile(not(s),i)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=hashIterable(this))}});var mt=Iterable.prototype;mt[i]=!0,mt[le]=mt.values,mt.__toJS=mt.toArray,mt.__toStringMapper=quoteString,mt.inspect=mt.toSource=function(){return this.toString()},mt.chain=mt.flatMap,mt.contains=mt.includes,mixin(KeyedIterable,{flip:function(){return reify(this,flipFactory(this))},mapEntries:function(s,i){var u=this,_=0;return reify(this,this.toSeq().map((function(w,x){return s.call(i,[x,w],_++,u)})).fromEntrySeq())},mapKeys:function(s,i){var u=this;return reify(this,this.toSeq().flip().map((function(_,w){return s.call(i,_,w,u)})).flip())}});var gt=KeyedIterable.prototype;function keyMapper(s,i){return i}function entryMapper(s,i){return[i,s]}function not(s){return function(){return!s.apply(this,arguments)}}function neg(s){return function(){return-s.apply(this,arguments)}}function quoteString(s){return"string"==typeof s?JSON.stringify(s):String(s)}function defaultZipper(){return arrCopy(arguments)}function defaultNegComparator(s,i){return s<i?1:s>i?-1:0}function hashIterable(s){if(s.size===1/0)return 0;var i=isOrdered(s),u=isKeyed(s),_=i?1:0;return murmurHashOfSize(s.__iterate(u?i?function(s,i){_=31*_+hashMerge(hash(s),hash(i))|0}:function(s,i){_=_+hashMerge(hash(s),hash(i))|0}:i?function(s){_=31*_+hash(s)|0}:function(s){_=_+hash(s)|0}),_)}function murmurHashOfSize(s,i){return i=ye(i,3432918353),i=ye(i<<15|i>>>-15,461845907),i=ye(i<<13|i>>>-13,5),i=ye((i=(i+3864292196|0)^s)^i>>>16,2246822507),i=smi((i=ye(i^i>>>13,3266489909))^i>>>16)}function hashMerge(s,i){return s^i+2654435769+(s<<6)+(s>>2)|0}return gt[u]=!0,gt[le]=mt.entries,gt.__toJS=mt.toObject,gt.__toStringMapper=function(s,i){return JSON.stringify(i)+": "+quoteString(s)},mixin(IndexedIterable,{toKeyedSeq:function(){return new ToKeyedSequence(this,!1)},filter:function(s,i){return reify(this,filterFactory(this,s,i,!1))},findIndex:function(s,i){var u=this.findEntry(s,i);return u?u[0]:-1},indexOf:function(s){var i=this.keyOf(s);return void 0===i?-1:i},lastIndexOf:function(s){var i=this.lastKeyOf(s);return void 0===i?-1:i},reverse:function(){return reify(this,reverseFactory(this,!1))},slice:function(s,i){return
reify(this,sliceFactory(this,s,i,!1))},splice:function(s,i){var u=arguments.length;if(i=Math.max(0|i,0),0===u||2===u&&!i)return this;s=resolveBegin(s,s<0?this.count():this.size);var _=this.slice(0,s);return reify(this,1===u?_:_.concat(arrCopy(arguments,2),this.slice(s+i)))},findLastIndex:function(s,i){var u=this.findLastEntry(s,i);return u?u[0]:-1},first:function(){return this.get(0)},flatten:function(s){return reify(this,flattenFactory(this,s,!1))},get:function(s,i){return(s=wrapIndex(this,s))<0||this.size===1/0||void 0!==this.size&&s>this.size?i:this.find((function(i,u){return u===s}),void 0,i)},has:function(s){return(s=wrapIndex(this,s))>=0&&(void 0!==this.size?this.size===1/0||s{"function"==typeof Object.create?s.exports=function inherits(s,i){i&&(s.super_=i,s.prototype=Object.create(i.prototype,{constructor:{value:s,enumerable:!1,writable:!0,configurable:!0}}))}:s.exports=function inherits(s,i){if(i){s.super_=i;var TempCtor=function(){};TempCtor.prototype=i.prototype,s.prototype=new TempCtor,s.prototype.constructor=s}}},32144:s=>{s.exports=function(s,i,u,_){var w=new Blob(void 0!==_?[_,s]:[s],{type:u||"application/octet-stream"});if(void 0!==window.navigator.msSaveBlob)window.navigator.msSaveBlob(w,i);else{var x=window.URL&&window.URL.createObjectURL?window.URL.createObjectURL(w):window.webkitURL.createObjectURL(w),j=document.createElement("a");j.style.display="none",j.href=x,j.setAttribute("download",i),void 0===j.download&&j.setAttribute("target","_blank"),document.body.appendChild(j),j.click(),setTimeout((function(){document.body.removeChild(j),window.URL.revokeObjectURL(x)}),200)}}},68472:(s,i,u)=>{var _=NaN,w="[object Symbol]",x=/^\s+|\s+$/g,j=/^[-+]0x[0-9a-f]+$/i,P=/^0b[01]+$/i,B=/^0o[0-7]+$/i,$=parseInt,U="object"==typeof u.g&&u.g&&u.g.Object===Object&&u.g,Y="object"==typeof self&&self&&self.Object===Object&&self,X=U||Y||Function("return this")(),Z=Object.prototype.toString,ee=Math.max,ae=Math.min,now=function(){return X.Date.now()};function isObject(s){var i=typeof s;return!!s&&("object"==i||"function"==i)}function toNumber(s){if("number"==typeof s)return s;if(function isSymbol(s){return"symbol"==typeof s||function isObjectLike(s){return!!s&&"object"==typeof s}(s)&&Z.call(s)==w}(s))return _;if(isObject(s)){var i="function"==typeof s.valueOf?s.valueOf():s;s=isObject(i)?i+"":i}if("string"!=typeof s)return 0===s?s:+s;s=s.replace(x,"");var u=P.test(s);return u||B.test(s)?$(s.slice(2),u?2:8):j.test(s)?_:+s}s.exports=function debounce(s,i,u){var _,w,x,j,P,B,$=0,U=!1,Y=!1,X=!0;if("function"!=typeof s)throw new TypeError("Expected a function");function invokeFunc(i){var u=_,x=w;return _=w=void 0,$=i,j=s.apply(x,u)}function shouldInvoke(s){var u=s-B;return void 0===B||u>=i||u<0||Y&&s-$>=x}function timerExpired(){var s=now();if(shouldInvoke(s))return trailingEdge(s);P=setTimeout(timerExpired,function remainingWait(s){var u=i-(s-B);return Y?ae(u,x-(s-$)):u}(s))}function trailingEdge(s){return P=void 0,X&&_?invokeFunc(s):(_=w=void 0,j)}function debounced(){var s=now(),u=shouldInvoke(s);if(_=arguments,w=this,B=s,u){if(void 0===P)return function leadingEdge(s){return $=s,P=setTimeout(timerExpired,i),U?invokeFunc(s):j}(B);if(Y)return P=setTimeout(timerExpired,i),invokeFunc(B)}return void 0===P&&(P=setTimeout(timerExpired,i)),j}return i=toNumber(i)||0,isObject(u)&&(U=!!u.leading,x=(Y="maxWait"in u)?ee(toNumber(u.maxWait)||0,i):x,X="trailing"in u?!!u.trailing:X),debounced.cancel=function cancel(){void 0!==P&&clearTimeout(P),$=0,_=B=w=P=void 0},debounced.flush=function flush(){return void 
[The bundle continues with lodash's internal machinery: the Hash, ListCache, MapCache, SetCache and Stack caches, LazyWrapper/LodashWrapper, the array helpers (arrayEach, arrayFilter, arrayMap, arrayPush, arrayReduce, arraySome), and the base* layer: baseClone, baseIsEqual/baseIsEqualDeep, baseIsMatch, baseIteratee, baseKeys/baseKeysIn, baseMatches/baseMatchesProperty, baseMerge/baseMergeDeep, baseGet/baseSet, baseRest, baseSlice, baseToString and baseUnset.]
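A readable sketch of lodash's ListCache from the modules above: a tiny array-backed map used by Stack and MapCache until it grows large enough to be promoted to a real Map or Hash.

class ListCache {
  constructor() {
    this.__data__ = [];
    this.size = 0;
  }
  _indexOf(key) {
    // assocIndexOf: SameValueZero comparison, so NaN matches NaN
    const data = this.__data__;
    for (let i = data.length - 1; i >= 0; i--) {
      const k = data[i][0];
      if (k === key || (k !== k && key !== key)) return i;
    }
    return -1;
  }
  get(key) {
    const i = this._indexOf(key);
    return i < 0 ? undefined : this.__data__[i][1];
  }
  has(key) {
    return this._indexOf(key) > -1;
  }
  delete(key) {
    const i = this._indexOf(key);
    if (i < 0) return false;
    this.__data__.splice(i, 1);
    this.size--;
    return true;
  }
  set(key, value) {
    const i = this._indexOf(key);
    if (i < 0) {
      this.size++;
      this.__data__.push([key, value]);
    } else {
      this.__data__[i][1] = value;
    }
    return this;
  }
}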
[Next come lodash's cloning and function-wrapping helpers: cloneArrayBuffer/Buffer/DataView/RegExp/Symbol/TypedArray, composeArgs(Right), copyObject/copySymbols, the create* factories behind bind, curry and partial application (createWrap, createCtor, createCurry, createHybrid, createPartial, createRecurry), the deburr letter map, the deep-equality workers equalArrays, equalByTag and equalObjects, and the getTag/getNative/getRawTag type-detection helpers.]
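A sketch of the value-coercion branches of equalByTag above; the full module also handles ArrayBuffer, DataView, Map, Set and Symbol. Dates and boxed numbers compare by coerced numeric value, regexps and boxed strings by string form.

function equalByTagSketch(a, b, tag) {
  switch (tag) {
    case '[object Boolean]':
    case '[object Date]':
    case '[object Number]':
      // +new Date(..) yields a timestamp; the NaN check mirrors lodash's eq
      return +a === +b || (+a !== +a && +b !== +b);
    case '[object Error]':
      return a.name === b.name && a.message === b.message;
    case '[object RegExp]':
    case '[object String]':
      // e.g. /ab/gi and new String('x') compare as '/ab/gi' and 'x'
      return `${a}` === `${b}`;
  }
  return false;
}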
[The internals conclude with the hash/list/map-cache method implementations, key and index predicates (isIndex, isIterateeCall, isKey, isKeyable, isMasked, isPrototype), memoizeCapped, mergeData, overArg/overRest, placeholder handling (reorder, replaceHolders), setToString/shortOut, stringToPath and the Unicode-aware string and word regexes, followed by the first public exports: ary, camelCase, capitalize, clone, constant, curry and lodash's full debounce (module 73336).]
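A usage sketch for the bundled debounce (lodash's implementation, module 73336); `search`, `input` and the `_` binding are hypothetical application names, not part of the bundle.

const search = (q) => console.log('searching for', q); // hypothetical handler
const debouncedSearch = _.debounce(search, 300, { trailing: true, maxWait: 1000 });
// Fires at most once per 300ms of quiet, and at least once per second
// under continuous typing, thanks to maxWait.
input.addEventListener('input', (e) => debouncedSearch(e.target.value));
// debouncedSearch.cancel() drops a pending trailing call;
// debouncedSearch.flush() invokes it immediately if one is pending.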
[Then further public exports (deburr, eq, find/findIndex, flatten, get, hasIn, identity and the isArguments through isFunction family), plus the lodash/fp layer: the baseConvert engine (module 44944) with its mapping tables (aliasToReal, aryMethod, aryRearg, iterateeAry/iterateeRearg, methodRearg, methodSpread, mutate, realToAlias, remap, skipFixed, skipRearg), used to build the converted fp set exported by module 90544.]
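A usage sketch for the converted lodash/fp set built by module 90544: curried, data-last and non-mutating, cloning along the written path. `fpSet` is a hypothetical name for whatever binding the bundle exposes.

const state = { user: { name: 'Grace' }, items: [1, 2] };
const next = fpSet('user.name')('Ada')(state);
// state is untouched; next.user is a fresh object with name === 'Ada',
// while unwritten branches such as next.items still share state's arrays.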
[The remainder of the lodash surface follows (isLength through isWeakMap, iteratee, keys/keysIn, last, memoize, merge, negate, omit, property, rearg, reduce, set, some, the stub and to* coercions, words, zipObject and the lodash wrapper itself), and then lowlight (module 35244), a highlight.js wrapper exposing highlight, highlightAuto, registerLanguage, listLanguages and registerAlias through a virtual-tree Emitter.]
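A usage sketch for the bundled lowlight wrapper (module 35244): highlight() returns a tree of {type, tagName, properties, children} nodes rather than an HTML string, with class names under the default hljs- prefix. It assumes 'javascript' was registered first via registerLanguage().

const result = highlight('javascript', 'const x = 1;');
result.language;  // 'javascript'
result.relevance; // relevance score reported by highlight.js
result.value;     // e.g. a span node with properties.className ['hljs-keyword']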
u=this.stack,_=u[u.length-1],w=s.rootNode.children,x=i?{type:"element",tagName:"span",properties:{className:[i]},children:w}:w;_.children=_.children.concat(x)},Emitter.prototype.openNode=function open(s){var i=this.stack,u=this.options.classPrefix+s,_=i[i.length-1],w={type:"element",tagName:"span",properties:{className:[u]},children:[]};_.children.push(w),i.push(w)},Emitter.prototype.closeNode=function close(){this.stack.pop()},Emitter.prototype.closeAllNodes=noop,Emitter.prototype.finalize=noop,Emitter.prototype.toHTML=function toHtmlNoop(){return""};var x="hljs-";function highlight(s,i,u){var j,P=_.configure({}),B=(u||{}).prefix;if("string"!=typeof s)throw w("Expected `string` for name, got `%s`",s);if(!_.getLanguage(s))throw w("Unknown language: `%s` is not registered",s);if("string"!=typeof i)throw w("Expected `string` for value, got `%s`",i);if(null==B&&(B=x),_.configure({__emitter:Emitter,classPrefix:B}),j=_.highlight(i,{language:s,ignoreIllegals:!0}),_.configure(P||{}),j.errorRaised)throw j.errorRaised;return{relevance:j.relevance,language:j.language,value:j.emitter.rootNode.children}}function Emitter(s){this.options=s,this.rootNode={children:[]},this.stack=[this.rootNode]}function noop(){}},68440:(s,i,u)=>{const _=u(12548);function coerceElementMatchingCallback(s){return"string"==typeof s?i=>i.element===s:s.constructor&&s.extend?i=>i instanceof s:s}class ArraySlice{constructor(s){this.elements=s||[]}toValue(){return this.elements.map((s=>s.toValue()))}map(s,i){return this.elements.map(s,i)}flatMap(s,i){return this.map(s,i).reduce(((s,i)=>s.concat(i)),[])}compactMap(s,i){const u=[];return this.forEach((_=>{const w=s.bind(i)(_);w&&u.push(w)})),u}filter(s,i){return s=coerceElementMatchingCallback(s),new ArraySlice(this.elements.filter(s,i))}reject(s,i){return s=coerceElementMatchingCallback(s),new ArraySlice(this.elements.filter(_(s),i))}find(s,i){return s=coerceElementMatchingCallback(s),this.elements.find(s,i)}forEach(s,i){this.elements.forEach(s,i)}reduce(s,i){return this.elements.reduce(s,i)}includes(s){return this.elements.some((i=>i.equals(s)))}shift(){return this.elements.shift()}unshift(s){this.elements.unshift(this.refract(s))}push(s){return this.elements.push(this.refract(s)),this}add(s){this.push(s)}get(s){return this.elements[s]}getValue(s){const i=this.elements[s];if(i)return i.toValue()}get length(){return this.elements.length}get isEmpty(){return 0===this.elements.length}get first(){return this.elements[0]}}"undefined"!=typeof Symbol&&(ArraySlice.prototype[Symbol.iterator]=function symbol(){return this.elements[Symbol.iterator]()}),s.exports=ArraySlice},8032:s=>{class KeyValuePair{constructor(s,i){this.key=s,this.value=i}clone(){const s=new KeyValuePair;return this.key&&(s.key=this.key.clone()),this.value&&(s.value=this.value.clone()),s}}s.exports=KeyValuePair},43280:(s,i,u)=>{const _=u(56756),w=u(36384),x=u(30568),j=u(37e3),P=u(8940),B=u(85472),$=u(46508);class Namespace{constructor(s){this.elementMap={},this.elementDetection=[],this.Element=$.Element,this.KeyValuePair=$.KeyValuePair,s&&s.noDefault||this.useDefault(),this._attributeElementKeys=[],this._attributeElementArrayKeys=[]}use(s){return s.namespace&&s.namespace({base:this}),s.load&&s.load({base:this}),this}useDefault(){return 
this.register("null",$.NullElement).register("string",$.StringElement).register("number",$.NumberElement).register("boolean",$.BooleanElement).register("array",$.ArrayElement).register("object",$.ObjectElement).register("member",$.MemberElement).register("ref",$.RefElement).register("link",$.LinkElement),this.detect(_,$.NullElement,!1).detect(w,$.StringElement,!1).detect(x,$.NumberElement,!1).detect(j,$.BooleanElement,!1).detect(Array.isArray,$.ArrayElement,!1).detect(P,$.ObjectElement,!1),this}register(s,i){return this._elements=void 0,this.elementMap[s]=i,this}unregister(s){return this._elements=void 0,delete this.elementMap[s],this}detect(s,i,u){return void 0===u||u?this.elementDetection.unshift([s,i]):this.elementDetection.push([s,i]),this}toElement(s){if(s instanceof this.Element)return s;let i;for(let u=0;u{const i=s[0].toUpperCase()+s.substr(1);this._elements[i]=this.elementMap[s]}))),this._elements}get serialiser(){return new B(this)}}B.prototype.Namespace=Namespace,s.exports=Namespace},74512:(s,i,u)=>{const _=u(12548),w=u(68440);class ObjectSlice extends w{map(s,i){return this.elements.map((u=>s.bind(i)(u.value,u.key,u)))}filter(s,i){return new ObjectSlice(this.elements.filter((u=>s.bind(i)(u.value,u.key,u))))}reject(s,i){return this.filter(_(s.bind(i)))}forEach(s,i){return this.elements.forEach(((u,_)=>{s.bind(i)(u.value,u.key,u,_)}))}keys(){return this.map(((s,i)=>i.toValue()))}values(){return this.map((s=>s.toValue()))}}s.exports=ObjectSlice},46508:(s,i,u)=>{const _=u(97928),w=u(15448),x=u(65052),j=u(6236),P=u(26416),B=u(17352),$=u(34036),U=u(94408),Y=u(17080),X=u(63552),Z=u(68440),ee=u(74512),ae=u(8032);function refract(s){if(s instanceof _)return s;if("string"==typeof s)return new x(s);if("number"==typeof s)return new j(s);if("boolean"==typeof s)return new P(s);if(null===s)return new w;if(Array.isArray(s))return new B(s.map(refract));if("object"==typeof s){return new U(s)}return s}_.prototype.ObjectElement=U,_.prototype.RefElement=X,_.prototype.MemberElement=$,_.prototype.refract=refract,Z.prototype.refract=refract,s.exports={Element:_,NullElement:w,StringElement:x,NumberElement:j,BooleanElement:P,ArrayElement:B,MemberElement:$,ObjectElement:U,LinkElement:Y,RefElement:X,refract,ArraySlice:Z,ObjectSlice:ee,KeyValuePair:ae}},17080:(s,i,u)=>{const _=u(97928);s.exports=class LinkElement extends _{constructor(s,i,u){super(s||[],i,u),this.element="link"}get relation(){return this.attributes.get("relation")}set relation(s){this.attributes.set("relation",s)}get href(){return this.attributes.get("href")}set href(s){this.attributes.set("href",s)}}},63552:(s,i,u)=>{const _=u(97928);s.exports=class RefElement extends _{constructor(s,i,u){super(s||[],i,u),this.element="ref",this.path||(this.path="element")}get path(){return this.attributes.get("path")}set path(s){this.attributes.set("path",s)}}},50784:(s,i,u)=>{const _=u(43280),w=u(46508);i.MH=_,i.KeyValuePair=u(8032),i.eW=w.ArraySlice,i.wL=w.ObjectSlice,i.gr=w.Element,i.M$=w.StringElement,i.wH=w.NumberElement,i.Ar=w.BooleanElement,i.WM=w.NullElement,i.uQ=w.ArrayElement,i.We=w.ObjectElement,i.u6=w.MemberElement,i.eE=w.RefElement,i.UH=w.LinkElement,i.WG=w.refract,u(85472),u(40344)},17352:(s,i,u)=>{const _=u(12548),w=u(97928),x=u(68440);class ArrayElement extends w{constructor(s,i,u){super(s||[],i,u),this.element="array"}primitive(){return"array"}get(s){return this.content[s]}getValue(s){const i=this.get(s);if(i)return i.toValue()}getIndex(s){return this.content[s]}set(s,i){return this.content[s]=this.refract(i),this}remove(s){const 
i=this.content.splice(s,1);return i.length?i[0]:null}map(s,i){return this.content.map(s,i)}flatMap(s,i){return this.map(s,i).reduce(((s,i)=>s.concat(i)),[])}compactMap(s,i){const u=[];return this.forEach((_=>{const w=s.bind(i)(_);w&&u.push(w)})),u}filter(s,i){return new x(this.content.filter(s,i))}reject(s,i){return this.filter(_(s),i)}reduce(s,i){let u,_;void 0!==i?(u=0,_=this.refract(i)):(u=1,_="object"===this.primitive()?this.first.value:this.first);for(let i=u;i{s.bind(i)(u,this.refract(_))}))}shift(){return this.content.shift()}unshift(s){this.content.unshift(this.refract(s))}push(s){return this.content.push(this.refract(s)),this}add(s){this.push(s)}findElements(s,i){const u=i||{},_=!!u.recursive,w=void 0===u.results?[]:u.results;return this.forEach(((i,u,x)=>{_&&void 0!==i.findElements&&i.findElements(s,{results:w,recursive:_}),s(i,u,x)&&w.push(i)})),w}find(s){return new x(this.findElements(s,{recursive:!0}))}findByElement(s){return this.find((i=>i.element===s))}findByClass(s){return this.find((i=>i.classes.includes(s)))}getById(s){return this.find((i=>i.id.toValue()===s)).first}includes(s){return this.content.some((i=>i.equals(s)))}contains(s){return this.includes(s)}empty(){return new this.constructor([])}"fantasy-land/empty"(){return this.empty()}concat(s){return new this.constructor(this.content.concat(s.content))}"fantasy-land/concat"(s){return this.concat(s)}"fantasy-land/map"(s){return new this.constructor(this.map(s))}"fantasy-land/chain"(s){return this.map((i=>s(i)),this).reduce(((s,i)=>s.concat(i)),this.empty())}"fantasy-land/filter"(s){return new this.constructor(this.content.filter(s))}"fantasy-land/reduce"(s,i){return this.content.reduce(s,i)}get length(){return this.content.length}get isEmpty(){return 0===this.content.length}get first(){return this.getIndex(0)}get second(){return this.getIndex(1)}get last(){return this.getIndex(this.length-1)}}ArrayElement.empty=function empty(){return new this},ArrayElement["fantasy-land/empty"]=ArrayElement.empty,"undefined"!=typeof Symbol&&(ArrayElement.prototype[Symbol.iterator]=function symbol(){return this.content[Symbol.iterator]()}),s.exports=ArrayElement},26416:(s,i,u)=>{const _=u(97928);s.exports=class BooleanElement extends _{constructor(s,i,u){super(s,i,u),this.element="boolean"}primitive(){return"boolean"}}},97928:(s,i,u)=>{const _=u(90948),w=u(8032),x=u(68440);class Element{constructor(s,i,u){i&&(this.meta=i),u&&(this.attributes=u),this.content=s}freeze(){Object.isFrozen(this)||(this._meta&&(this.meta.parent=this,this.meta.freeze()),this._attributes&&(this.attributes.parent=this,this.attributes.freeze()),this.children.forEach((s=>{s.parent=this,s.freeze()}),this),this.content&&Array.isArray(this.content)&&Object.freeze(this.content),Object.freeze(this))}primitive(){}clone(){const s=new this.constructor;return s.element=this.element,this.meta.length&&(s._meta=this.meta.clone()),this.attributes.length&&(s._attributes=this.attributes.clone()),this.content?this.content.clone?s.content=this.content.clone():Array.isArray(this.content)?s.content=this.content.map((s=>s.clone())):s.content=this.content:s.content=this.content,s}toValue(){return this.content instanceof Element?this.content.toValue():this.content instanceof w?{key:this.content.key.toValue(),value:this.content.value?this.content.value.toValue():void 0}:this.content&&this.content.map?this.content.map((s=>s.toValue()),this):this.content}toRef(s){if(""===this.id.toValue())throw Error("Cannot create reference to an element that does not contain an ID");const i=new 
this.RefElement(this.id.toValue());return s&&(i.path=s),i}findRecursive(...s){if(arguments.length>1&&!this.isFrozen)throw new Error("Cannot find recursive with multiple element names without first freezing the element. Call `element.freeze()`");const i=s.pop();let u=new x;const append=(s,i)=>(s.push(i),s),checkElement=(s,u)=>{u.element===i&&s.push(u);const _=u.findRecursive(i);return _&&_.reduce(append,s),u.content instanceof w&&(u.content.key&&checkElement(s,u.content.key),u.content.value&&checkElement(s,u.content.value)),s};return this.content&&(this.content.element&&checkElement(u,this.content),Array.isArray(this.content)&&this.content.reduce(checkElement,u)),s.isEmpty||(u=u.filter((i=>{let u=i.parents.map((s=>s.element));for(const i in s){const _=s[i],w=u.indexOf(_);if(-1===w)return!1;u=u.splice(0,w)}return!0}))),u}set(s){return this.content=s,this}equals(s){return _(this.toValue(),s)}getMetaProperty(s,i){if(!this.meta.hasKey(s)){if(this.isFrozen){const s=this.refract(i);return s.freeze(),s}this.meta.set(s,i)}return this.meta.get(s)}setMetaProperty(s,i){this.meta.set(s,i)}get element(){return this._storedElement||"element"}set element(s){this._storedElement=s}get content(){return this._content}set content(s){if(s instanceof Element)this._content=s;else if(s instanceof x)this.content=s.elements;else if("string"==typeof s||"number"==typeof s||"boolean"==typeof s||"null"===s||null==s)this._content=s;else if(s instanceof w)this._content=s;else if(Array.isArray(s))this._content=s.map(this.refract);else{if("object"!=typeof s)throw new Error("Cannot set content to given value");this._content=Object.keys(s).map((i=>new this.MemberElement(i,s[i])))}}get meta(){if(!this._meta){if(this.isFrozen){const s=new this.ObjectElement;return s.freeze(),s}this._meta=new this.ObjectElement}return this._meta}set meta(s){s instanceof this.ObjectElement?this._meta=s:this.meta.set(s||{})}get attributes(){if(!this._attributes){if(this.isFrozen){const s=new this.ObjectElement;return s.freeze(),s}this._attributes=new this.ObjectElement}return this._attributes}set attributes(s){s instanceof this.ObjectElement?this._attributes=s:this.attributes.set(s||{})}get id(){return this.getMetaProperty("id","")}set id(s){this.setMetaProperty("id",s)}get classes(){return this.getMetaProperty("classes",[])}set classes(s){this.setMetaProperty("classes",s)}get title(){return this.getMetaProperty("title","")}set title(s){this.setMetaProperty("title",s)}get description(){return this.getMetaProperty("description","")}set description(s){this.setMetaProperty("description",s)}get links(){return this.getMetaProperty("links",[])}set links(s){this.setMetaProperty("links",s)}get isFrozen(){return Object.isFrozen(this)}get parents(){let{parent:s}=this;const i=new x;for(;s;)i.push(s),s=s.parent;return i}get children(){if(Array.isArray(this.content))return new x(this.content);if(this.content instanceof w){const s=new x([this.content.key]);return this.content.value&&s.push(this.content.value),s}return this.content instanceof Element?new x([this.content]):new x}get recursiveChildren(){const s=new x;return this.children.forEach((i=>{s.push(i),i.recursiveChildren.forEach((i=>{s.push(i)}))})),s}}s.exports=Element},34036:(s,i,u)=>{const _=u(8032),w=u(97928);s.exports=class MemberElement extends w{constructor(s,i,u,w){super(new _,u,w),this.element="member",this.key=s,this.value=i}get key(){return this.content.key}set key(s){this.content.key=this.refract(s)}get value(){return this.content.value}set 
value(s){this.content.value=this.refract(s)}}},15448:(s,i,u)=>{const _=u(97928);s.exports=class NullElement extends _{constructor(s,i,u){super(s||null,i,u),this.element="null"}primitive(){return"null"}set(){return new Error("Cannot set the value of null")}}},6236:(s,i,u)=>{const _=u(97928);s.exports=class NumberElement extends _{constructor(s,i,u){super(s,i,u),this.element="number"}primitive(){return"number"}}},94408:(s,i,u)=>{const _=u(12548),w=u(8940),x=u(17352),j=u(34036),P=u(74512);s.exports=class ObjectElement extends x{constructor(s,i,u){super(s||[],i,u),this.element="object"}primitive(){return"object"}toValue(){return this.content.reduce(((s,i)=>(s[i.key.toValue()]=i.value?i.value.toValue():void 0,s)),{})}get(s){const i=this.getMember(s);if(i)return i.value}getMember(s){if(void 0!==s)return this.content.find((i=>i.key.toValue()===s))}remove(s){let i=null;return this.content=this.content.filter((u=>u.key.toValue()!==s||(i=u,!1))),i}getKey(s){const i=this.getMember(s);if(i)return i.key}set(s,i){if(w(s))return Object.keys(s).forEach((i=>{this.set(i,s[i])})),this;const u=s,_=this.getMember(u);return _?_.value=i:this.content.push(new j(u,i)),this}keys(){return this.content.map((s=>s.key.toValue()))}values(){return this.content.map((s=>s.value.toValue()))}hasKey(s){return this.content.some((i=>i.key.equals(s)))}items(){return this.content.map((s=>[s.key.toValue(),s.value.toValue()]))}map(s,i){return this.content.map((u=>s.bind(i)(u.value,u.key,u)))}compactMap(s,i){const u=[];return this.forEach(((_,w,x)=>{const j=s.bind(i)(_,w,x);j&&u.push(j)})),u}filter(s,i){return new P(this.content).filter(s,i)}reject(s,i){return this.filter(_(s),i)}forEach(s,i){return this.content.forEach((u=>s.bind(i)(u.value,u.key,u)))}}},65052:(s,i,u)=>{const _=u(97928);s.exports=class StringElement extends _{constructor(s,i,u){super(s,i,u),this.element="string"}primitive(){return"string"}get length(){return this.content.length}}},40344:(s,i,u)=>{const _=u(85472);s.exports=class JSON06Serialiser extends _{serialise(s){if(!(s instanceof this.namespace.elements.Element))throw new TypeError(`Given element \`${s}\` is not an Element instance`);let i;s._attributes&&s.attributes.get("variable")&&(i=s.attributes.get("variable"));const u={element:s.element};s._meta&&s._meta.length>0&&(u.meta=this.serialiseObject(s.meta));const _="enum"===s.element||-1!==s.attributes.keys().indexOf("enumerations");if(_){const i=this.enumSerialiseAttributes(s);i&&(u.attributes=i)}else if(s._attributes&&s._attributes.length>0){let{attributes:_}=s;_.get("metadata")&&(_=_.clone(),_.set("meta",_.get("metadata")),_.remove("metadata")),"member"===s.element&&i&&(_=_.clone(),_.remove("variable")),_.length>0&&(u.attributes=this.serialiseObject(_))}if(_)u.content=this.enumSerialiseContent(s,u);else if(this[`${s.element}SerialiseContent`])u.content=this[`${s.element}SerialiseContent`](s,u);else if(void 0!==s.content){let _;i&&s.content.key?(_=s.content.clone(),_.key.attributes.set("variable",i),_=this.serialiseContent(_)):_=this.serialiseContent(s.content),this.shouldSerialiseContent(s,_)&&(u.content=_)}else this.shouldSerialiseContent(s,s.content)&&s instanceof this.namespace.elements.Array&&(u.content=[]);return u}shouldSerialiseContent(s,i){return"parseResult"===s.element||"httpRequest"===s.element||"httpResponse"===s.element||"category"===s.element||"link"===s.element||void 0!==i&&(!Array.isArray(i)||0!==i.length)}refSerialiseContent(s,i){return delete i.attributes,{href:s.toValue(),path:s.path.toValue()}}sourceMapSerialiseContent(s){return 
s.toValue()}dataStructureSerialiseContent(s){return[this.serialiseContent(s.content)]}enumSerialiseAttributes(s){const i=s.attributes.clone(),u=i.remove("enumerations")||new this.namespace.elements.Array([]),_=i.get("default");let w=i.get("samples")||new this.namespace.elements.Array([]);if(_&&_.content&&(_.content.attributes&&_.content.attributes.remove("typeAttributes"),i.set("default",new this.namespace.elements.Array([_.content]))),w.forEach((s=>{s.content&&s.content.element&&s.content.attributes.remove("typeAttributes")})),s.content&&0!==u.length&&w.unshift(s.content),w=w.map((s=>s instanceof this.namespace.elements.Array?[s]:new this.namespace.elements.Array([s.content]))),w.length&&i.set("samples",w),i.length>0)return this.serialiseObject(i)}enumSerialiseContent(s){if(s._attributes){const i=s.attributes.get("enumerations");if(i&&i.length>0)return i.content.map((s=>{const i=s.clone();return i.attributes.remove("typeAttributes"),this.serialise(i)}))}if(s.content){const i=s.content.clone();return i.attributes.remove("typeAttributes"),[this.serialise(i)]}return[]}deserialise(s){if("string"==typeof s)return new this.namespace.elements.String(s);if("number"==typeof s)return new this.namespace.elements.Number(s);if("boolean"==typeof s)return new this.namespace.elements.Boolean(s);if(null===s)return new this.namespace.elements.Null;if(Array.isArray(s))return new this.namespace.elements.Array(s.map(this.deserialise,this));const i=this.namespace.getElementClass(s.element),u=new i;u.element!==s.element&&(u.element=s.element),s.meta&&this.deserialiseObject(s.meta,u.meta),s.attributes&&this.deserialiseObject(s.attributes,u.attributes);const _=this.deserialiseContent(s.content);if(void 0===_&&null!==u.content||(u.content=_),"enum"===u.element){u.content&&u.attributes.set("enumerations",u.content);let s=u.attributes.get("samples");if(u.attributes.remove("samples"),s){const _=s;s=new this.namespace.elements.Array,_.forEach((_=>{_.forEach((_=>{const w=new i(_);w.element=u.element,s.push(w)}))}));const w=s.shift();u.content=w?w.content:void 0,u.attributes.set("samples",s)}else u.content=void 0;let _=u.attributes.get("default");if(_&&_.length>0){_=_.get(0);const s=new i(_);s.element=u.element,u.attributes.set("default",s)}}else if("dataStructure"===u.element&&Array.isArray(u.content))[u.content]=u.content;else if("category"===u.element){const s=u.attributes.get("meta");s&&(u.attributes.set("metadata",s),u.attributes.remove("meta"))}else"member"===u.element&&u.key&&u.key._attributes&&u.key._attributes.getValue("variable")&&(u.attributes.set("variable",u.key.attributes.get("variable")),u.key.attributes.remove("variable"));return u}serialiseContent(s){if(s instanceof this.namespace.elements.Element)return this.serialise(s);if(s instanceof this.namespace.KeyValuePair){const i={key:this.serialise(s.key)};return s.value&&(i.value=this.serialise(s.value)),i}return s&&s.map?s.map(this.serialise,this):s}deserialiseContent(s){if(s){if(s.element)return this.deserialise(s);if(s.key){const i=new this.namespace.KeyValuePair(this.deserialise(s.key));return s.value&&(i.value=this.deserialise(s.value)),i}if(s.map)return s.map(this.deserialise,this)}return s}shouldRefract(s){return!!(s._attributes&&s.attributes.keys().length||s._meta&&s.meta.keys().length)||"enum"!==s.element&&(s.element!==s.primitive()||"member"===s.element)}convertKeyToRefract(s,i){return 
this.shouldRefract(i)?this.serialise(i):"enum"===i.element?this.serialiseEnum(i):"array"===i.element?i.map((i=>this.shouldRefract(i)||"default"===s?this.serialise(i):"array"===i.element||"object"===i.element||"enum"===i.element?i.children.map((s=>this.serialise(s))):i.toValue())):"object"===i.element?(i.content||[]).map(this.serialise,this):i.toValue()}serialiseEnum(s){return s.children.map((s=>this.serialise(s)))}serialiseObject(s){const i={};return s.forEach(((s,u)=>{if(s){const _=u.toValue();i[_]=this.convertKeyToRefract(_,s)}})),i}deserialiseObject(s,i){Object.keys(s).forEach((u=>{i.set(u,this.deserialise(s[u]))}))}}},85472:s=>{s.exports=class JSONSerialiser{constructor(s){this.namespace=s||new this.Namespace}serialise(s){if(!(s instanceof this.namespace.elements.Element))throw new TypeError(`Given element \`${s}\` is not an Element instance`);const i={element:s.element};s._meta&&s._meta.length>0&&(i.meta=this.serialiseObject(s.meta)),s._attributes&&s._attributes.length>0&&(i.attributes=this.serialiseObject(s.attributes));const u=this.serialiseContent(s.content);return void 0!==u&&(i.content=u),i}deserialise(s){if(!s.element)throw new Error("Given value is not an object containing an element name");const i=new(this.namespace.getElementClass(s.element));i.element!==s.element&&(i.element=s.element),s.meta&&this.deserialiseObject(s.meta,i.meta),s.attributes&&this.deserialiseObject(s.attributes,i.attributes);const u=this.deserialiseContent(s.content);return void 0===u&&null!==i.content||(i.content=u),i}serialiseContent(s){if(s instanceof this.namespace.elements.Element)return this.serialise(s);if(s instanceof this.namespace.KeyValuePair){const i={key:this.serialise(s.key)};return s.value&&(i.value=this.serialise(s.value)),i}if(s&&s.map){if(0===s.length)return;return s.map(this.serialise,this)}return s}deserialiseContent(s){if(s){if(s.element)return this.deserialise(s);if(s.key){const i=new this.namespace.KeyValuePair(this.deserialise(s.key));return s.value&&(i.value=this.deserialise(s.value)),i}if(s.map)return s.map(this.deserialise,this)}return s}serialiseObject(s){const i={};if(s.forEach(((s,u)=>{s&&(i[u.toValue()]=this.serialise(s))})),0!==Object.keys(i).length)return i}deserialiseObject(s,i){Object.keys(s).forEach((u=>{i.set(u,this.deserialise(s[u]))}))}}},57872:(s,i,u)=>{var _="function"==typeof Map&&Map.prototype,w=Object.getOwnPropertyDescriptor&&_?Object.getOwnPropertyDescriptor(Map.prototype,"size"):null,x=_&&w&&"function"==typeof w.get?w.get:null,j=_&&Map.prototype.forEach,P="function"==typeof Set&&Set.prototype,B=Object.getOwnPropertyDescriptor&&P?Object.getOwnPropertyDescriptor(Set.prototype,"size"):null,$=P&&B&&"function"==typeof B.get?B.get:null,U=P&&Set.prototype.forEach,Y="function"==typeof WeakMap&&WeakMap.prototype?WeakMap.prototype.has:null,X="function"==typeof WeakSet&&WeakSet.prototype?WeakSet.prototype.has:null,Z="function"==typeof WeakRef&&WeakRef.prototype?WeakRef.prototype.deref:null,ee=Boolean.prototype.valueOf,ae=Object.prototype.toString,ie=Function.prototype.toString,le=String.prototype.match,ce=String.prototype.slice,pe=String.prototype.replace,de=String.prototype.toUpperCase,fe=String.prototype.toLowerCase,ye=RegExp.prototype.test,be=Array.prototype.concat,_e=Array.prototype.join,we=Array.prototype.slice,Se=Math.floor,xe="function"==typeof BigInt?BigInt.prototype.valueOf:null,Pe=Object.getOwnPropertySymbols,Te="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?Symbol.prototype.toString:null,Re="function"==typeof Symbol&&"object"==typeof 
Symbol.iterator,qe="function"==typeof Symbol&&Symbol.toStringTag&&(typeof Symbol.toStringTag===Re||"symbol")?Symbol.toStringTag:null,$e=Object.prototype.propertyIsEnumerable,ze=("function"==typeof Reflect?Reflect.getPrototypeOf:Object.getPrototypeOf)||([].__proto__===Array.prototype?function(s){return s.__proto__}:null);function addNumericSeparator(s,i){if(s===1/0||s===-1/0||s!=s||s&&s>-1e3&&s<1e3||ye.call(/e/,i))return i;var u=/[0-9](?=(?:[0-9]{3})+(?![0-9]))/g;if("number"==typeof s){var _=s<0?-Se(-s):Se(s);if(_!==s){var w=String(_),x=ce.call(i,w.length+1);return pe.call(w,u,"$&_")+"."+pe.call(pe.call(x,/([0-9]{3})/g,"$&_"),/_$/,"")}}return pe.call(i,u,"$&_")}var We=u(16204),He=We.custom,Ye=isSymbol(He)?He:null;function wrapQuotes(s,i,u){var _="double"===(u.quoteStyle||i)?'"':"'";return _+s+_}function quote(s){return pe.call(String(s),/"/g,""")}function isArray(s){return!("[object Array]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}function isRegExp(s){return!("[object RegExp]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}function isSymbol(s){if(Re)return s&&"object"==typeof s&&s instanceof Symbol;if("symbol"==typeof s)return!0;if(!s||"object"!=typeof s||!Te)return!1;try{return Te.call(s),!0}catch(s){}return!1}s.exports=function inspect_(s,i,_,w){var P=i||{};if(has(P,"quoteStyle")&&"single"!==P.quoteStyle&&"double"!==P.quoteStyle)throw new TypeError('option "quoteStyle" must be "single" or "double"');if(has(P,"maxStringLength")&&("number"==typeof P.maxStringLength?P.maxStringLength<0&&P.maxStringLength!==1/0:null!==P.maxStringLength))throw new TypeError('option "maxStringLength", if provided, must be a positive integer, Infinity, or `null`');var B=!has(P,"customInspect")||P.customInspect;if("boolean"!=typeof B&&"symbol"!==B)throw new TypeError("option \"customInspect\", if provided, must be `true`, `false`, or `'symbol'`");if(has(P,"indent")&&null!==P.indent&&"\t"!==P.indent&&!(parseInt(P.indent,10)===P.indent&&P.indent>0))throw new TypeError('option "indent" must be "\\t", an integer > 0, or `null`');if(has(P,"numericSeparator")&&"boolean"!=typeof P.numericSeparator)throw new TypeError('option "numericSeparator", if provided, must be `true` or `false`');var ae=P.numericSeparator;if(void 0===s)return"undefined";if(null===s)return"null";if("boolean"==typeof s)return s?"true":"false";if("string"==typeof s)return inspectString(s,P);if("number"==typeof s){if(0===s)return 1/0/s>0?"0":"-0";var de=String(s);return ae?addNumericSeparator(s,de):de}if("bigint"==typeof s){var ye=String(s)+"n";return ae?addNumericSeparator(s,ye):ye}var Se=void 0===P.depth?5:P.depth;if(void 0===_&&(_=0),_>=Se&&Se>0&&"object"==typeof s)return isArray(s)?"[Array]":"[Object]";var Pe=function getIndent(s,i){var u;if("\t"===s.indent)u="\t";else{if(!("number"==typeof s.indent&&s.indent>0))return null;u=_e.call(Array(s.indent+1)," ")}return{base:u,prev:_e.call(Array(i+1),u)}}(P,_);if(void 0===w)w=[];else if(indexOf(w,s)>=0)return"[Circular]";function inspect(s,i,u){if(i&&(w=we.call(w)).push(i),u){var x={depth:P.depth};return has(P,"quoteStyle")&&(x.quoteStyle=P.quoteStyle),inspect_(s,x,_+1,w)}return inspect_(s,P,_+1,w)}if("function"==typeof s&&!isRegExp(s)){var He=function nameOf(s){if(s.name)return s.name;var i=le.call(ie.call(s),/^function\s*([\w$]+)/);if(i)return i[1];return null}(s),Qe=arrObjKeys(s,inspect);return"[Function"+(He?": "+He:" (anonymous)")+"]"+(Qe.length>0?" 
{ "+_e.call(Qe,", ")+" }":"")}if(isSymbol(s)){var Xe=Re?pe.call(String(s),/^(Symbol\(.*\))_[^)]*$/,"$1"):Te.call(s);return"object"!=typeof s||Re?Xe:markBoxed(Xe)}if(function isElement(s){if(!s||"object"!=typeof s)return!1;if("undefined"!=typeof HTMLElement&&s instanceof HTMLElement)return!0;return"string"==typeof s.nodeName&&"function"==typeof s.getAttribute}(s)){for(var et="<"+fe.call(String(s.nodeName)),tt=s.attributes||[],rt=0;rt"}if(isArray(s)){if(0===s.length)return"[]";var nt=arrObjKeys(s,inspect);return Pe&&!function singleLineValues(s){for(var i=0;i=0)return!1;return!0}(nt)?"["+indentedJoin(nt,Pe)+"]":"[ "+_e.call(nt,", ")+" ]"}if(function isError(s){return!("[object Error]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}(s)){var ot=arrObjKeys(s,inspect);return"cause"in Error.prototype||!("cause"in s)||$e.call(s,"cause")?0===ot.length?"["+String(s)+"]":"{ ["+String(s)+"] "+_e.call(ot,", ")+" }":"{ ["+String(s)+"] "+_e.call(be.call("[cause]: "+inspect(s.cause),ot),", ")+" }"}if("object"==typeof s&&B){if(Ye&&"function"==typeof s[Ye]&&We)return We(s,{depth:Se-_});if("symbol"!==B&&"function"==typeof s.inspect)return s.inspect()}if(function isMap(s){if(!x||!s||"object"!=typeof s)return!1;try{x.call(s);try{$.call(s)}catch(s){return!0}return s instanceof Map}catch(s){}return!1}(s)){var at=[];return j&&j.call(s,(function(i,u){at.push(inspect(u,s,!0)+" => "+inspect(i,s))})),collectionOf("Map",x.call(s),at,Pe)}if(function isSet(s){if(!$||!s||"object"!=typeof s)return!1;try{$.call(s);try{x.call(s)}catch(s){return!0}return s instanceof Set}catch(s){}return!1}(s)){var st=[];return U&&U.call(s,(function(i){st.push(inspect(i,s))})),collectionOf("Set",$.call(s),st,Pe)}if(function isWeakMap(s){if(!Y||!s||"object"!=typeof s)return!1;try{Y.call(s,Y);try{X.call(s,X)}catch(s){return!0}return s instanceof WeakMap}catch(s){}return!1}(s))return weakCollectionOf("WeakMap");if(function isWeakSet(s){if(!X||!s||"object"!=typeof s)return!1;try{X.call(s,X);try{Y.call(s,Y)}catch(s){return!0}return s instanceof WeakSet}catch(s){}return!1}(s))return weakCollectionOf("WeakSet");if(function isWeakRef(s){if(!Z||!s||"object"!=typeof s)return!1;try{return Z.call(s),!0}catch(s){}return!1}(s))return weakCollectionOf("WeakRef");if(function isNumber(s){return!("[object Number]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}(s))return markBoxed(inspect(Number(s)));if(function isBigInt(s){if(!s||"object"!=typeof s||!xe)return!1;try{return xe.call(s),!0}catch(s){}return!1}(s))return markBoxed(inspect(xe.call(s)));if(function isBoolean(s){return!("[object Boolean]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}(s))return markBoxed(ee.call(s));if(function isString(s){return!("[object String]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}(s))return markBoxed(inspect(String(s)));if("undefined"!=typeof window&&s===window)return"{ [object Window] }";if(s===u.g)return"{ [object globalThis] }";if(!function isDate(s){return!("[object Date]"!==toStr(s)||qe&&"object"==typeof s&&qe in s)}(s)&&!isRegExp(s)){var it=arrObjKeys(s,inspect),lt=ze?ze(s)===Object.prototype:s instanceof Object||s.constructor===Object,ct=s instanceof Object?"":"null prototype",ut=!lt&&qe&&Object(s)===s&&qe in s?ce.call(toStr(s),8,-1):ct?"Object":"",pt=(lt||"function"!=typeof s.constructor?"":s.constructor.name?s.constructor.name+" ":"")+(ut||ct?"["+_e.call(be.call([],ut||[],ct||[]),": ")+"] ":"");return 0===it.length?pt+"{}":Pe?pt+"{"+indentedJoin(it,Pe)+"}":pt+"{ "+_e.call(it,", ")+" }"}return String(s)};var Qe=Object.prototype.hasOwnProperty||function(s){return s in 
this};function has(s,i){return Qe.call(s,i)}function toStr(s){return ae.call(s)}function indexOf(s,i){if(s.indexOf)return s.indexOf(i);for(var u=0,_=s.length;u<_;u++)if(s[u]===i)return u;return-1}function inspectString(s,i){if(s.length>i.maxStringLength){var u=s.length-i.maxStringLength,_="... "+u+" more character"+(u>1?"s":"");return inspectString(ce.call(s,0,i.maxStringLength),i)+_}return wrapQuotes(pe.call(pe.call(s,/(['\\])/g,"\\$1"),/[\x00-\x1f]/g,lowbyte),"single",i)}function lowbyte(s){var i=s.charCodeAt(0),u={8:"b",9:"t",10:"n",12:"f",13:"r"}[i];return u?"\\"+u:"\\x"+(i<16?"0":"")+de.call(i.toString(16))}function markBoxed(s){return"Object("+s+")"}function weakCollectionOf(s){return s+" { ? }"}function collectionOf(s,i,u,_){return s+" ("+i+") {"+(_?indentedJoin(u,_):_e.call(u,", "))+"}"}function indentedJoin(s,i){if(0===s.length)return"";var u="\n"+i.prev+i.base;return u+_e.call(s,","+u)+"\n"+i.prev}function arrObjKeys(s,i){var u=isArray(s),_=[];if(u){_.length=s.length;for(var w=0;w{var i,u,_=s.exports={};function defaultSetTimout(){throw new Error("setTimeout has not been defined")}function defaultClearTimeout(){throw new Error("clearTimeout has not been defined")}function runTimeout(s){if(i===setTimeout)return setTimeout(s,0);if((i===defaultSetTimout||!i)&&setTimeout)return i=setTimeout,setTimeout(s,0);try{return i(s,0)}catch(u){try{return i.call(null,s,0)}catch(u){return i.call(this,s,0)}}}!function(){try{i="function"==typeof setTimeout?setTimeout:defaultSetTimout}catch(s){i=defaultSetTimout}try{u="function"==typeof clearTimeout?clearTimeout:defaultClearTimeout}catch(s){u=defaultClearTimeout}}();var w,x=[],j=!1,P=-1;function cleanUpNextTick(){j&&w&&(j=!1,w.length?x=w.concat(x):P=-1,x.length&&drainQueue())}function drainQueue(){if(!j){var s=runTimeout(cleanUpNextTick);j=!0;for(var i=x.length;i;){for(w=x,x=[];++P1)for(var u=1;u{"use strict";var _=u(69143);function emptyFunction(){}function emptyFunctionWithReset(){}emptyFunctionWithReset.resetWarningCache=emptyFunction,s.exports=function(){function shim(s,i,u,w,x,j){if(j!==_){var P=new Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. 
Read more at http://fb.me/use-check-prop-types");throw P.name="Invariant Violation",P}}function getShim(){return shim}shim.isRequired=shim;var s={array:shim,bigint:shim,bool:shim,func:shim,number:shim,object:shim,string:shim,symbol:shim,any:shim,arrayOf:getShim,element:shim,elementType:shim,instanceOf:getShim,node:shim,objectOf:getShim,oneOf:getShim,oneOfType:getShim,shape:getShim,exact:getShim,checkPropTypes:emptyFunctionWithReset,resetWarningCache:emptyFunction};return s.PropTypes=s,s}},3268:(s,i,u)=>{s.exports=u(39776)()},69143:s=>{"use strict";s.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},6515:s=>{"use strict";var i=String.prototype.replace,u=/%20/g,_="RFC1738",w="RFC3986";s.exports={default:w,formatters:{RFC1738:function(s){return i.call(s,u,"+")},RFC3986:function(s){return String(s)}},RFC1738:_,RFC3986:w}},87392:(s,i,u)=>{"use strict";var _=u(84368),w=u(34812),x=u(6515);s.exports={formats:x,parse:w,stringify:_}},34812:(s,i,u)=>{"use strict";var _=u(15872),w=Object.prototype.hasOwnProperty,x=Array.isArray,j={allowDots:!1,allowPrototypes:!1,allowSparse:!1,arrayLimit:20,charset:"utf-8",charsetSentinel:!1,comma:!1,decoder:_.decode,delimiter:"&",depth:5,ignoreQueryPrefix:!1,interpretNumericEntities:!1,parameterLimit:1e3,parseArrays:!0,plainObjects:!1,strictNullHandling:!1},interpretNumericEntities=function(s){return s.replace(/&#(\d+);/g,(function(s,i){return String.fromCharCode(parseInt(i,10))}))},parseArrayValue=function(s,i){return s&&"string"==typeof s&&i.comma&&s.indexOf(",")>-1?s.split(","):s},P=function parseQueryStringKeys(s,i,u,_){if(s){var x=u.allowDots?s.replace(/\.([^.[]+)/g,"[$1]"):s,j=/(\[[^[\]]*])/g,P=u.depth>0&&/(\[[^[\]]*])/.exec(x),B=P?x.slice(0,P.index):x,$=[];if(B){if(!u.plainObjects&&w.call(Object.prototype,B)&&!u.allowPrototypes)return;$.push(B)}for(var U=0;u.depth>0&&null!==(P=j.exec(x))&&U=0;--x){var j,P=s[x];if("[]"===P&&u.parseArrays)j=[].concat(w);else{j=u.plainObjects?Object.create(null):{};var B="["===P.charAt(0)&&"]"===P.charAt(P.length-1)?P.slice(1,-1):P,$=parseInt(B,10);u.parseArrays||""!==B?!isNaN($)&&P!==B&&String($)===B&&$>=0&&u.parseArrays&&$<=u.arrayLimit?(j=[])[$]=w:"__proto__"!==B&&(j[B]=w):j={0:w}}w=j}return w}($,i,u,_)}};s.exports=function(s,i){var u=function normalizeParseOptions(s){if(!s)return j;if(null!==s.decoder&&void 0!==s.decoder&&"function"!=typeof s.decoder)throw new TypeError("Decoder has to be a function.");if(void 0!==s.charset&&"utf-8"!==s.charset&&"iso-8859-1"!==s.charset)throw new TypeError("The charset option must be either utf-8, iso-8859-1, or undefined");var i=void 0===s.charset?j.charset:s.charset;return{allowDots:void 0===s.allowDots?j.allowDots:!!s.allowDots,allowPrototypes:"boolean"==typeof s.allowPrototypes?s.allowPrototypes:j.allowPrototypes,allowSparse:"boolean"==typeof s.allowSparse?s.allowSparse:j.allowSparse,arrayLimit:"number"==typeof s.arrayLimit?s.arrayLimit:j.arrayLimit,charset:i,charsetSentinel:"boolean"==typeof s.charsetSentinel?s.charsetSentinel:j.charsetSentinel,comma:"boolean"==typeof s.comma?s.comma:j.comma,decoder:"function"==typeof s.decoder?s.decoder:j.decoder,delimiter:"string"==typeof s.delimiter||_.isRegExp(s.delimiter)?s.delimiter:j.delimiter,depth:"number"==typeof s.depth||!1===s.depth?+s.depth:j.depth,ignoreQueryPrefix:!0===s.ignoreQueryPrefix,interpretNumericEntities:"boolean"==typeof s.interpretNumericEntities?s.interpretNumericEntities:j.interpretNumericEntities,parameterLimit:"number"==typeof 
s.parameterLimit?s.parameterLimit:j.parameterLimit,parseArrays:!1!==s.parseArrays,plainObjects:"boolean"==typeof s.plainObjects?s.plainObjects:j.plainObjects,strictNullHandling:"boolean"==typeof s.strictNullHandling?s.strictNullHandling:j.strictNullHandling}}(i);if(""===s||null==s)return u.plainObjects?Object.create(null):{};for(var B="string"==typeof s?function parseQueryStringValues(s,i){var u,P={},B=i.ignoreQueryPrefix?s.replace(/^\?/,""):s,$=i.parameterLimit===1/0?void 0:i.parameterLimit,U=B.split(i.delimiter,$),Y=-1,X=i.charset;if(i.charsetSentinel)for(u=0;u-1&&(ee=x(ee)?[ee]:ee),w.call(P,Z)?P[Z]=_.combine(P[Z],ee):P[Z]=ee}return P}(s,u):s,$=u.plainObjects?Object.create(null):{},U=Object.keys(B),Y=0;Y{"use strict";var _=u(27223),w=u(15872),x=u(6515),j=Object.prototype.hasOwnProperty,P={brackets:function brackets(s){return s+"[]"},comma:"comma",indices:function indices(s,i){return s+"["+i+"]"},repeat:function repeat(s){return s}},B=Array.isArray,$=String.prototype.split,U=Array.prototype.push,pushToArray=function(s,i){U.apply(s,B(i)?i:[i])},Y=Date.prototype.toISOString,X=x.default,Z={addQueryPrefix:!1,allowDots:!1,charset:"utf-8",charsetSentinel:!1,delimiter:"&",encode:!0,encoder:w.encode,encodeValuesOnly:!1,format:X,formatter:x.formatters[X],indices:!1,serializeDate:function serializeDate(s){return Y.call(s)},skipNulls:!1,strictNullHandling:!1},ee={},ae=function stringify(s,i,u,x,j,P,U,Y,X,ae,ie,le,ce,pe,de,fe){for(var ye=s,be=fe,_e=0,we=!1;void 0!==(be=be.get(ee))&&!we;){var Se=be.get(s);if(_e+=1,void 0!==Se){if(Se===_e)throw new RangeError("Cyclic object value");we=!0}void 0===be.get(ee)&&(_e=0)}if("function"==typeof Y?ye=Y(i,ye):ye instanceof Date?ye=ie(ye):"comma"===u&&B(ye)&&(ye=w.maybeMap(ye,(function(s){return s instanceof Date?ie(s):s}))),null===ye){if(j)return U&&!pe?U(i,Z.encoder,de,"key",le):i;ye=""}if(function isNonNullishPrimitive(s){return"string"==typeof s||"number"==typeof s||"boolean"==typeof s||"symbol"==typeof s||"bigint"==typeof s}(ye)||w.isBuffer(ye)){if(U){var xe=pe?i:U(i,Z.encoder,de,"key",le);if("comma"===u&&pe){for(var Pe=$.call(String(ye),","),Te="",Re=0;Re0?ye.join(",")||null:void 0}];else if(B(Y))qe=Y;else{var ze=Object.keys(ye);qe=X?ze.sort(X):ze}for(var We=x&&B(ye)&&1===ye.length?i+"[]":i,He=0;He0?de+pe:""}},15872:(s,i,u)=>{"use strict";var _=u(6515),w=Object.prototype.hasOwnProperty,x=Array.isArray,j=function(){for(var s=[],i=0;i<256;++i)s.push("%"+((i<16?"0":"")+i.toString(16)).toUpperCase());return s}(),P=function arrayToObject(s,i){for(var u=i&&i.plainObjects?Object.create(null):{},_=0;_1;){var i=s.pop(),u=i.obj[i.prop];if(x(u)){for(var _=[],w=0;w=48&&U<=57||U>=65&&U<=90||U>=97&&U<=122||x===_.RFC1738&&(40===U||41===U)?B+=P.charAt($):U<128?B+=j[U]:U<2048?B+=j[192|U>>6]+j[128|63&U]:U<55296||U>=57344?B+=j[224|U>>12]+j[128|U>>6&63]+j[128|63&U]:($+=1,U=65536+((1023&U)<<10|1023&P.charCodeAt($)),B+=j[240|U>>18]+j[128|U>>12&63]+j[128|U>>6&63]+j[128|63&U])}return B},isBuffer:function isBuffer(s){return!(!s||"object"!=typeof s)&&!!(s.constructor&&s.constructor.isBuffer&&s.constructor.isBuffer(s))},isRegExp:function isRegExp(s){return"[object RegExp]"===Object.prototype.toString.call(s)},maybeMap:function maybeMap(s,i){if(x(s)){for(var u=[],_=0;_{"use strict";var u=Object.prototype.hasOwnProperty;function decode(s){try{return decodeURIComponent(s.replace(/\+/g," "))}catch(s){return null}}function encode(s){try{return encodeURIComponent(s)}catch(s){return null}}i.stringify=function querystringify(s,i){i=i||"";var _,w,x=[];for(w in"string"!=typeof 
i&&(i="?"),s)if(u.call(s,w)){if((_=s[w])||null!=_&&!isNaN(_)||(_=""),w=encode(w),_=encode(_),null===w||null===_)continue;x.push(w+"="+_)}return x.length?i+x.join("&"):""},i.parse=function querystring(s){for(var i,u=/([^=?#&]+)=?([^&]*)/g,_={};i=u.exec(s);){var w=decode(i[1]),x=decode(i[2]);null===w||null===x||w in _||(_[w]=x)}return _}},77496:(s,i,u)=>{const _=u(50861),w=u(64716),x=_.types;s.exports=class RandExp{constructor(s,i){if(this._setDefaults(s),s instanceof RegExp)this.ignoreCase=s.ignoreCase,this.multiline=s.multiline,s=s.source;else{if("string"!=typeof s)throw new Error("Expected a regexp or string");this.ignoreCase=i&&-1!==i.indexOf("i"),this.multiline=i&&-1!==i.indexOf("m")}this.tokens=_(s)}_setDefaults(s){this.max=null!=s.max?s.max:null!=RandExp.prototype.max?RandExp.prototype.max:100,this.defaultRange=s.defaultRange?s.defaultRange:this.defaultRange.clone(),s.randInt&&(this.randInt=s.randInt)}gen(){return this._gen(this.tokens,[])}_gen(s,i){var u,_,w,j,P;switch(s.type){case x.ROOT:case x.GROUP:if(s.followedBy||s.notFollowedBy)return"";for(s.remember&&void 0===s.groupNumber&&(s.groupNumber=i.push(null)-1),_="",j=0,P=(u=s.options?this._randSelect(s.options):s.stack).length;j{"use strict";var _=u(26512),w=65536,x=4294967295;var j=u(75856).Buffer,P=u.g.crypto||u.g.msCrypto;P&&P.getRandomValues?s.exports=function randomBytes(s,i){if(s>x)throw new RangeError("requested too many random bytes");var u=j.allocUnsafe(s);if(s>0)if(s>w)for(var B=0;B{"use strict";function _typeof(s){return _typeof="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(s){return typeof s}:function(s){return s&&"function"==typeof Symbol&&s.constructor===Symbol&&s!==Symbol.prototype?"symbol":typeof s},_typeof(s)}Object.defineProperty(i,"__esModule",{value:!0}),i.CopyToClipboard=void 0;var _=_interopRequireDefault(u(11504)),w=_interopRequireDefault(u(6272)),x=["text","onCopy","options","children"];function _interopRequireDefault(s){return s&&s.__esModule?s:{default:s}}function ownKeys(s,i){var u=Object.keys(s);if(Object.getOwnPropertySymbols){var _=Object.getOwnPropertySymbols(s);i&&(_=_.filter((function(i){return Object.getOwnPropertyDescriptor(s,i).enumerable}))),u.push.apply(u,_)}return u}function _objectSpread(s){for(var i=1;i=0||(w[u]=s[u]);return w}(s,i);if(Object.getOwnPropertySymbols){var x=Object.getOwnPropertySymbols(s);for(_=0;_=0||Object.prototype.propertyIsEnumerable.call(s,u)&&(w[u]=s[u])}return w}function _defineProperties(s,i){for(var u=0;u{"use strict";var _=u(25396).CopyToClipboard;_.CopyToClipboard=_,s.exports=_},41392:(s,i,u)=>{"use strict";function _typeof(s){return _typeof="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(s){return typeof s}:function(s){return s&&"function"==typeof Symbol&&s.constructor===Symbol&&s!==Symbol.prototype?"symbol":typeof s},_typeof(s)}Object.defineProperty(i,"__esModule",{value:!0}),i.DebounceInput=void 0;var _=_interopRequireDefault(u(11504)),w=_interopRequireDefault(u(68472)),x=["element","onChange","value","minLength","debounceTimeout","forceNotifyByEnter","forceNotifyOnBlur","onKeyDown","onBlur","inputRef"];function _interopRequireDefault(s){return s&&s.__esModule?s:{default:s}}function _objectWithoutProperties(s,i){if(null==s)return{};var u,_,w=function _objectWithoutPropertiesLoose(s,i){if(null==s)return{};var u,_,w={},x=Object.keys(s);for(_=0;_=0||(w[u]=s[u]);return w}(s,i);if(Object.getOwnPropertySymbols){var 
x=Object.getOwnPropertySymbols(s);for(_=0;_=0||Object.prototype.propertyIsEnumerable.call(s,u)&&(w[u]=s[u])}return w}function ownKeys(s,i){var u=Object.keys(s);if(Object.getOwnPropertySymbols){var _=Object.getOwnPropertySymbols(s);i&&(_=_.filter((function(i){return Object.getOwnPropertyDescriptor(s,i).enumerable}))),u.push.apply(u,_)}return u}function _objectSpread(s){for(var i=1;i=_?u.notify(s):i.length>w.length&&u.notify(_objectSpread(_objectSpread({},s),{},{target:_objectSpread(_objectSpread({},s.target),{},{value:""})}))}))})),_defineProperty(_assertThisInitialized(u),"onKeyDown",(function(s){"Enter"===s.key&&u.forceNotify(s);var i=u.props.onKeyDown;i&&(s.persist(),i(s))})),_defineProperty(_assertThisInitialized(u),"onBlur",(function(s){u.forceNotify(s);var i=u.props.onBlur;i&&(s.persist(),i(s))})),_defineProperty(_assertThisInitialized(u),"createNotifier",(function(s){if(s<0)u.notify=function(){return null};else if(0===s)u.notify=u.doNotify;else{var i=(0,w.default)((function(s){u.isDebouncing=!1,u.doNotify(s)}),s);u.notify=function(s){u.isDebouncing=!0,i(s)},u.flush=function(){return i.flush()},u.cancel=function(){u.isDebouncing=!1,i.cancel()}}})),_defineProperty(_assertThisInitialized(u),"doNotify",(function(){u.props.onChange.apply(void 0,arguments)})),_defineProperty(_assertThisInitialized(u),"forceNotify",(function(s){var i=u.props.debounceTimeout;if(u.isDebouncing||!(i>0)){u.cancel&&u.cancel();var _=u.state.value,w=u.props.minLength;_.length>=w?u.doNotify(s):u.doNotify(_objectSpread(_objectSpread({},s),{},{target:_objectSpread(_objectSpread({},s.target),{},{value:_})}))}})),u.isDebouncing=!1,u.state={value:void 0===s.value||null===s.value?"":s.value};var _=u.props.debounceTimeout;return u.createNotifier(_),u}return function _createClass(s,i,u){return i&&_defineProperties(s.prototype,i),u&&_defineProperties(s,u),Object.defineProperty(s,"prototype",{writable:!1}),s}(DebounceInput,[{key:"componentDidUpdate",value:function componentDidUpdate(s){if(!this.isDebouncing){var i=this.props,u=i.value,_=i.debounceTimeout,w=s.debounceTimeout,x=s.value,j=this.state.value;void 0!==u&&x!==u&&j!==u&&this.setState({value:u}),_!==w&&this.createNotifier(_)}}},{key:"componentWillUnmount",value:function componentWillUnmount(){this.flush&&this.flush()}},{key:"render",value:function render(){var s,i,u=this.props,w=u.element,j=(u.onChange,u.value,u.minLength,u.debounceTimeout,u.forceNotifyByEnter),P=u.forceNotifyOnBlur,B=u.onKeyDown,$=u.onBlur,U=u.inputRef,Y=_objectWithoutProperties(u,x),X=this.state.value;s=j?{onKeyDown:this.onKeyDown}:B?{onKeyDown:B}:{},i=P?{onBlur:this.onBlur}:$?{onBlur:$}:{};var Z=U?{ref:U}:{};return _.default.createElement(w,_objectSpread(_objectSpread(_objectSpread(_objectSpread({},Y),{},{onChange:this.onChange,value:X},s),i),Z))}}]),DebounceInput}(_.default.PureComponent);i.DebounceInput=j,_defineProperty(j,"defaultProps",{element:"input",type:"text",onKeyDown:void 0,onBlur:void 0,value:void 0,minLength:0,debounceTimeout:100,forceNotifyByEnter:!0,forceNotifyOnBlur:!0,inputRef:void 0})},50132:(s,i,u)=>{"use strict";var _=u(41392).DebounceInput;_.DebounceInput=_,s.exports=_},79516:(s,i,u)=>{"use strict";var _=u(11504),w=u(14712);function p(s){for(var i="https://reactjs.org/docs/error-decoder.html?invariant="+s,u=1;u