From 778302cfa1044a07738f549b59117821176f83ff Mon Sep 17 00:00:00 2001 From: Kaustubh Mani Date: Wed, 4 Oct 2023 13:50:12 -0400 Subject: [PATCH] add kaustubh [ci skip] --- .gitmodules | 4 - 404.html | 179 + 404.md | 12 - Gemfile.lock | 285 + Makefile | 25 - README.md | 125 - _bibliography/papers.bib | 567 -- _config.yml | 115 - _data/people.yml | 582 -- _events/aido-6.md | 26 - _events/ecorl.md | 16 - _events/iros2020.md | 16 - _events/iros2021.md | 28 - _events/learningseriesfall2020.md | 21 - _events/learningseriessummer2020.md | 21 - _events/learningserieswinter2021.md | 21 - _events/mrss.md | 19 - _events/physical-reasoning.md | 15 - _pages/publications.md | 25 - _posts/2018-04-20-cvpr.md | 7 - _posts/2018-09-05-welcomephd.md | 7 - _posts/2018-11-28-image.md | 7 - _posts/2019-08-01-dal.md | 7 - _posts/2019-09-01-welcomeinterns.md | 7 - _posts/2019-09-01-welcomestudents.md | 7 - _posts/2019-09-10-adrpaper.md | 7 - _posts/2020-01-20-gradslam-icra.md | 7 - _posts/2020-06-05-maplite-award.md | 7 - _posts/2020-06-30-gunshi-graduates.md | 7 - _posts/2020-09-10-robotlearningseries.md | 7 - _posts/2020-10-10-neurips.md | 7 - _posts/2020-10-15-lamaml.md | 7 - _posts/2020-10-30-irosworkshop.md | 7 - _posts/2020-11-30-gradslam.md | 7 - _posts/2020-12-05-krishna-fellowship.md | 7 - _projects/01-gradslam.md | 26 - _projects/adr.md | 24 - _projects/conceptfusion.md | 42 - _projects/ctcnet.md | 20 - _projects/dal.md | 26 - _projects/fcal.md | 27 - _projects/gradsim.md | 36 - _projects/ivrl.md | 53 - _projects/lamaml.md | 25 - _projects/ltvn.md | 25 - _projects/o4a.md | 23 - _projects/taskography.md | 29 - assets/css/style.css | 2883 +++++++++ blog.html | 600 +- blog/2018/04/20/cvpr.html | 179 + blog/2018/09/05/welcomephd.html | 179 + blog/2018/11/28/image.html | 179 + blog/2019/08/01/dal.html | 179 + blog/2019/09/01/welcomeinterns.html | 179 + blog/2019/09/01/welcomestudents.html | 179 + blog/2019/09/10/adrpaper.html | 179 + blog/2020/01/20/gradslam-icra.html | 179 + blog/2020/06/05/maplite-award.html | 179 + blog/2020/06/30/gunshi-graduates.html | 179 + blog/2020/09/10/robotlearningseries.html | 179 + blog/2020/10/10/neurips.html | 179 + blog/2020/10/15/lamaml.html | 179 + blog/2020/10/30/irosworkshop.html | 179 + blog/2020/11/30/gradslam.html | 179 + blog/2020/12/05/krishna-fellowship.html | 179 + code.html | 177 + code.md | 8 - contact.html | 216 + contact.md | 51 - events.html | 1314 +++- events/aido-6.html | 5 + events/ecorl.html | 1 + events/iros2020.html | 1 + events/iros2021.html | 11 + events/learningseriesfall2020.html | 1 + events/learningseriessummer2020.html | 1 + events/learningserieswinter2021.html | 1 + events/mrss.html | 1 + events/physical-reasoning.html | 1 + index.html | 2689 +++++++- interns-winter-2021.html | 198 + people.html | 5127 ++++++++++++++- projects/01-gradslam.html | 305 + projects/adr.html | 420 ++ projects/conceptfusion.html | 305 + projects/ctcnet.html | 360 ++ projects/dal.html | 479 ++ projects/fcal.html | 483 ++ projects/gradsim.html | 423 ++ projects/ivrl.html | 444 ++ projects/lamaml.html | 301 + projects/ltvn.html | 305 + projects/o4a.html | 363 ++ projects/taskography.html | 304 + publications/index.html | 7381 ++++++++++++++++++++++ research.html | 2120 ++++++- screenshot.png | Bin 108257 -> 0 bytes 97 files changed, 30415 insertions(+), 2298 deletions(-) delete mode 100644 .gitmodules create mode 100644 404.html delete mode 100644 404.md create mode 100644 Gemfile.lock delete mode 100644 Makefile delete mode 100644 README.md delete mode 100644 
_bibliography/papers.bib delete mode 100644 _config.yml delete mode 100644 _data/people.yml delete mode 100644 _events/aido-6.md delete mode 100644 _events/ecorl.md delete mode 100644 _events/iros2020.md delete mode 100644 _events/iros2021.md delete mode 100644 _events/learningseriesfall2020.md delete mode 100644 _events/learningseriessummer2020.md delete mode 100644 _events/learningserieswinter2021.md delete mode 100644 _events/mrss.md delete mode 100644 _events/physical-reasoning.md delete mode 100644 _pages/publications.md delete mode 100644 _posts/2018-04-20-cvpr.md delete mode 100644 _posts/2018-09-05-welcomephd.md delete mode 100644 _posts/2018-11-28-image.md delete mode 100644 _posts/2019-08-01-dal.md delete mode 100644 _posts/2019-09-01-welcomeinterns.md delete mode 100644 _posts/2019-09-01-welcomestudents.md delete mode 100644 _posts/2019-09-10-adrpaper.md delete mode 100644 _posts/2020-01-20-gradslam-icra.md delete mode 100644 _posts/2020-06-05-maplite-award.md delete mode 100644 _posts/2020-06-30-gunshi-graduates.md delete mode 100644 _posts/2020-09-10-robotlearningseries.md delete mode 100644 _posts/2020-10-10-neurips.md delete mode 100644 _posts/2020-10-15-lamaml.md delete mode 100644 _posts/2020-10-30-irosworkshop.md delete mode 100644 _posts/2020-11-30-gradslam.md delete mode 100644 _posts/2020-12-05-krishna-fellowship.md delete mode 100644 _projects/01-gradslam.md delete mode 100644 _projects/adr.md delete mode 100644 _projects/conceptfusion.md delete mode 100644 _projects/ctcnet.md delete mode 100644 _projects/dal.md delete mode 100644 _projects/fcal.md delete mode 100644 _projects/gradsim.md delete mode 100644 _projects/ivrl.md delete mode 100644 _projects/lamaml.md delete mode 100644 _projects/ltvn.md delete mode 100644 _projects/o4a.md delete mode 100644 _projects/taskography.md create mode 100644 assets/css/style.css create mode 100644 blog/2018/04/20/cvpr.html create mode 100644 blog/2018/09/05/welcomephd.html create mode 100644 blog/2018/11/28/image.html create mode 100644 blog/2019/08/01/dal.html create mode 100644 blog/2019/09/01/welcomeinterns.html create mode 100644 blog/2019/09/01/welcomestudents.html create mode 100644 blog/2019/09/10/adrpaper.html create mode 100644 blog/2020/01/20/gradslam-icra.html create mode 100644 blog/2020/06/05/maplite-award.html create mode 100644 blog/2020/06/30/gunshi-graduates.html create mode 100644 blog/2020/09/10/robotlearningseries.html create mode 100644 blog/2020/10/10/neurips.html create mode 100644 blog/2020/10/15/lamaml.html create mode 100644 blog/2020/10/30/irosworkshop.html create mode 100644 blog/2020/11/30/gradslam.html create mode 100644 blog/2020/12/05/krishna-fellowship.html create mode 100644 code.html delete mode 100644 code.md create mode 100644 contact.html delete mode 100644 contact.md create mode 100644 events/aido-6.html create mode 100644 events/ecorl.html create mode 100644 events/iros2020.html create mode 100644 events/iros2021.html create mode 100644 events/learningseriesfall2020.html create mode 100644 events/learningseriessummer2020.html create mode 100644 events/learningserieswinter2021.html create mode 100644 events/mrss.html create mode 100644 events/physical-reasoning.html create mode 100644 interns-winter-2021.html create mode 100644 projects/01-gradslam.html create mode 100644 projects/adr.html create mode 100644 projects/conceptfusion.html create mode 100644 projects/ctcnet.html create mode 100644 projects/dal.html create mode 100644 projects/fcal.html create mode 100644 projects/gradsim.html 
create mode 100644 projects/ivrl.html create mode 100644 projects/lamaml.html create mode 100644 projects/ltvn.html create mode 100644 projects/o4a.html create mode 100644 projects/taskography.html create mode 100644 publications/index.html delete mode 100644 screenshot.png diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 36e949d..0000000 --- a/.gitmodules +++ /dev/null @@ -1,4 +0,0 @@ -[submodule "website-assets"] - path = website-assets - url = git@github.com:montrealrobotics/website-assets.git - branch = master diff --git a/404.html b/404.html new file mode 100644 index 0000000..4318e45 --- /dev/null +++ b/404.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | 404 Not Found + + + + + + + + + + + +
+Sorry! The server can’t find that page.
+
+Please consider trying to find what you need from the home page.
+
+If you think something might be broken, please send an email to webmaster.
+
+[remaining generated HTML for 404.html (head, navigation bar, and footer markup) omitted]
+ + + + + + + + + diff --git a/404.md b/404.md deleted file mode 100644 index 583b48a..0000000 --- a/404.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -layout: default -title: 404 Not Found ---- -Sorry! The server can't find that page. - -Please consider trying to find what you need from [the home page][home]. - -If you think something might be broken, please send an email to [webmaster][]. - -[home]: {{ site.base }}/ -[webmaster]: mailto:paulll@iro.umontreal.ca diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 0000000..af3db10 --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,285 @@ +GEM + remote: https://rubygems.org/ + specs: + activesupport (7.0.8) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 1.6, < 2) + minitest (>= 5.1) + tzinfo (~> 2.0) + addressable (2.8.5) + public_suffix (>= 2.0.2, < 6.0) + base64 (0.1.1) + bibtex-ruby (4.4.7) + latex-decode (~> 0.0) + citeproc (1.0.10) + namae (~> 1.0) + citeproc-ruby (1.1.14) + citeproc (~> 1.0, >= 1.0.9) + csl (~> 1.6) + coffee-script (2.4.1) + coffee-script-source + execjs + coffee-script-source (1.11.1) + colorator (1.1.0) + commonmarker (0.23.10) + concurrent-ruby (1.2.2) + csl (1.6.0) + namae (~> 1.0) + rexml + csl-styles (1.0.1.11) + csl (~> 1.0) + dnsruby (1.70.0) + simpleidn (~> 0.2.1) + em-websocket (0.5.3) + eventmachine (>= 0.12.9) + http_parser.rb (~> 0) + ethon (0.16.0) + ffi (>= 1.15.0) + eventmachine (1.2.7) + execjs (2.9.1) + faraday (2.7.11) + base64 + faraday-net_http (>= 2.0, < 3.1) + ruby2_keywords (>= 0.0.4) + faraday-net_http (3.0.2) + ffi (1.16.3) + forwardable-extended (2.6.0) + gemoji (3.0.1) + github-pages (228) + github-pages-health-check (= 1.17.9) + jekyll (= 3.9.3) + jekyll-avatar (= 0.7.0) + jekyll-coffeescript (= 1.1.1) + jekyll-commonmark-ghpages (= 0.4.0) + jekyll-default-layout (= 0.1.4) + jekyll-feed (= 0.15.1) + jekyll-gist (= 1.5.0) + jekyll-github-metadata (= 2.13.0) + jekyll-include-cache (= 0.2.1) + jekyll-mentions (= 1.6.0) + jekyll-optional-front-matter (= 0.3.2) + jekyll-paginate (= 1.1.0) + jekyll-readme-index (= 0.3.0) + jekyll-redirect-from (= 0.16.0) + jekyll-relative-links (= 0.6.1) + jekyll-remote-theme (= 0.4.3) + jekyll-sass-converter (= 1.5.2) + jekyll-seo-tag (= 2.8.0) + jekyll-sitemap (= 1.4.0) + jekyll-swiss (= 1.0.0) + jekyll-theme-architect (= 0.2.0) + jekyll-theme-cayman (= 0.2.0) + jekyll-theme-dinky (= 0.2.0) + jekyll-theme-hacker (= 0.2.0) + jekyll-theme-leap-day (= 0.2.0) + jekyll-theme-merlot (= 0.2.0) + jekyll-theme-midnight (= 0.2.0) + jekyll-theme-minimal (= 0.2.0) + jekyll-theme-modernist (= 0.2.0) + jekyll-theme-primer (= 0.6.0) + jekyll-theme-slate (= 0.2.0) + jekyll-theme-tactile (= 0.2.0) + jekyll-theme-time-machine (= 0.2.0) + jekyll-titles-from-headings (= 0.5.3) + jemoji (= 0.12.0) + kramdown (= 2.3.2) + kramdown-parser-gfm (= 1.1.0) + liquid (= 4.0.4) + mercenary (~> 0.3) + minima (= 2.5.1) + nokogiri (>= 1.13.6, < 2.0) + rouge (= 3.26.0) + terminal-table (~> 1.4) + github-pages-health-check (1.17.9) + addressable (~> 2.3) + dnsruby (~> 1.60) + octokit (~> 4.0) + public_suffix (>= 3.0, < 5.0) + typhoeus (~> 1.3) + html-pipeline (2.14.3) + activesupport (>= 2) + nokogiri (>= 1.4) + http_parser.rb (0.8.0) + i18n (1.14.1) + concurrent-ruby (~> 1.0) + jekyll (3.9.3) + addressable (~> 2.4) + colorator (~> 1.0) + em-websocket (~> 0.5) + i18n (>= 0.7, < 2) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 2.0) + kramdown (>= 1.17, < 3) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 4) + safe_yaml (~> 1.0) + jekyll-avatar (0.7.0) + 
jekyll (>= 3.0, < 5.0) + jekyll-coffeescript (1.1.1) + coffee-script (~> 2.2) + coffee-script-source (~> 1.11.1) + jekyll-commonmark (1.4.0) + commonmarker (~> 0.22) + jekyll-commonmark-ghpages (0.4.0) + commonmarker (~> 0.23.7) + jekyll (~> 3.9.0) + jekyll-commonmark (~> 1.4.0) + rouge (>= 2.0, < 5.0) + jekyll-default-layout (0.1.4) + jekyll (~> 3.0) + jekyll-feed (0.15.1) + jekyll (>= 3.7, < 5.0) + jekyll-gist (1.5.0) + octokit (~> 4.2) + jekyll-github-metadata (2.13.0) + jekyll (>= 3.4, < 5.0) + octokit (~> 4.0, != 4.4.0) + jekyll-include-cache (0.2.1) + jekyll (>= 3.7, < 5.0) + jekyll-mentions (1.6.0) + html-pipeline (~> 2.3) + jekyll (>= 3.7, < 5.0) + jekyll-optional-front-matter (0.3.2) + jekyll (>= 3.0, < 5.0) + jekyll-paginate (1.1.0) + jekyll-readme-index (0.3.0) + jekyll (>= 3.0, < 5.0) + jekyll-redirect-from (0.16.0) + jekyll (>= 3.3, < 5.0) + jekyll-relative-links (0.6.1) + jekyll (>= 3.3, < 5.0) + jekyll-remote-theme (0.4.3) + addressable (~> 2.0) + jekyll (>= 3.5, < 5.0) + jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0) + rubyzip (>= 1.3.0, < 3.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-scholar (5.16.0) + bibtex-ruby (~> 4.0, >= 4.0.13) + citeproc-ruby (~> 1.0) + csl-styles (~> 1.0) + jekyll (~> 3.0) + jekyll-seo-tag (2.8.0) + jekyll (>= 3.8, < 5.0) + jekyll-sitemap (1.4.0) + jekyll (>= 3.7, < 5.0) + jekyll-swiss (1.0.0) + jekyll-theme-architect (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-cayman (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-dinky (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-hacker (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-leap-day (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-merlot (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-midnight (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-minimal (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-modernist (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-primer (0.6.0) + jekyll (> 3.5, < 5.0) + jekyll-github-metadata (~> 2.9) + jekyll-seo-tag (~> 2.0) + jekyll-theme-slate (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-tactile (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-time-machine (0.2.0) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-titles-from-headings (0.5.3) + jekyll (>= 3.3, < 5.0) + jekyll-watch (2.2.1) + listen (~> 3.0) + jemoji (0.12.0) + gemoji (~> 3.0) + html-pipeline (~> 2.2) + jekyll (>= 3.0, < 5.0) + kramdown (2.3.2) + rexml + kramdown-parser-gfm (1.1.0) + kramdown (~> 2.0) + latex-decode (0.4.0) + liquid (4.0.4) + listen (3.8.0) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + minima (2.5.1) + jekyll (>= 3.5, < 5.0) + jekyll-feed (~> 0.9) + jekyll-seo-tag (~> 2.1) + minitest (5.20.0) + namae (1.1.1) + nokogiri (1.15.4-arm64-darwin) + racc (~> 1.4) + octokit (4.25.1) + faraday (>= 1, < 3) + sawyer (~> 0.9) + pathutil (0.16.2) + forwardable-extended (~> 2.6) + public_suffix (4.0.7) + racc (1.7.1) + rb-fsevent (0.11.2) + rb-inotify (0.10.1) + ffi (~> 1.0) + rexml (3.2.6) + rouge (3.26.0) + ruby2_keywords (0.0.5) + rubyzip (2.3.2) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + sawyer (0.9.2) + addressable (>= 2.3.5) + faraday (>= 0.17.3, < 3) 
+ simpleidn (0.2.1) + unf (~> 0.1.4) + terminal-table (1.8.0) + unicode-display_width (~> 1.1, >= 1.1.1) + typhoeus (1.4.0) + ethon (>= 0.9.0) + tzinfo (2.0.6) + concurrent-ruby (~> 1.0) + unf (0.1.4) + unf_ext + unf_ext (0.0.8.2) + unicode-display_width (1.8.0) + unicode_utils (1.4.0) + +PLATFORMS + arm64-darwin-22 + +DEPENDENCIES + github-pages + jekyll + jekyll-paginate + jekyll-scholar + jemoji + unicode_utils + +BUNDLED WITH + 2.4.20 diff --git a/Makefile b/Makefile deleted file mode 100644 index 88f0c4b..0000000 --- a/Makefile +++ /dev/null @@ -1,25 +0,0 @@ -# targets that aren't filenames -.PHONY: all clean deploy build serve - -all: build - -build: - bundle exec jekyll build - -install: - bundle install - -# you can configure these at the shell, e.g.: -# SERVE_PORT=5001 make serve -SERVE_HOST ?= 127.0.0.1 -SERVE_PORT ?= 5000 - -serve: - bundle exec jekyll serve --port $(SERVE_PORT) --host $(SERVE_HOST) - -clean: - $(RM) -r _site - - -deploy: clean build - ./bin/deploy diff --git a/README.md b/README.md deleted file mode 100644 index 8b6cc9b..0000000 --- a/README.md +++ /dev/null @@ -1,125 +0,0 @@ -Research Group Web Site Template -================================ - -This is a [Jekyll][]-based Web site intended for research groups. Your group should be able to get up and running with minimal fuss. - -

-![screenshot of the template](screenshot.png)

-
-This project originated at the University of Washington. You can see the machinery working live at [our site][sampa].
-
-This work is licensed under a [Creative Commons Attribution-NonCommercial 4.0 International License][license].
-
-[sampa]: http://sampa.cs.washington.edu/
-[license]: https://creativecommons.org/licenses/by-nc/4.0/
-
-
-Features
---------
-
-* Thanks to [Jekyll][], content is just text files. So even faculty should be able to figure it out.
-* Publications list generated from BibTeX.
-* Personnel list. Organize your professors, students, staff, and alumni.
-* Combined news stream and blog posts.
-* Easily extensible navigation bar.
-* Responsive (mobile-ready) design based on [Bootstrap][].
-
-[Bootstrap]: http://getbootstrap.com/
-
-
-Setup
------
-
-1. Install the dependencies. You will need [Python][], [bibble][] (`pip install bibble`), and [Jekyll][] (`gem install jekyll`).
-2. [Fork][] this repository on GitHub.
-3. Clone the fork to your own machine: `git clone git@github.com:yourgroup/research-group-web.git`.
-4. Add an "upstream" remote for the original repository so you can stay abreast of bugfixes: `git remote add upstream git://github.com/uwsampa/research-group-web.git`.
-5. Customize. Start with the `_config.yml` file, where you enter the name of the site and its URL.
-6. Type `make` to build the site and then run `make serve` to view your site.
-7. Keep adding content. See below for instructions for each of the various sections.
-8. Periodically pull from the upstream repository: `git pull upstream master`.
-
-[Python]: https://www.python.org/
-[Fork]: https://github.com/uwsampa/research-group-web/fork
-
-
-Publication List
-----------------
-
-The list of publications is in `bib/pubs.bib`. Typing `make` will generate `pubs.html`, which contains a pretty, sorted HTML-formatted list of papers. The public page, `publications.html`, also has a link to download the original BibTeX.
-
-
-News Items and Blog Posts
--------------------------
-
-For both long-form blog posts and short news updates, we use Jekyll's blogging system. To post a new item of either type, you create a file in the `_posts` directory using the naming convention `YYYY-MM-DD-title-for-url.md`. The date part of the filename always matters; the title part is currently only used for full blog posts (but is still required for news updates).
-
-The file must begin with [YAML front matter][yfm]. For news updates, use this:
-
-    ---
-    layout: post
-    shortnews: true
-    ---
-
-For full blog posts, use this format:
-
-    ---
-    layout: post
-    title: "Some Great Title Here"
-    ---
-
-Concoct a page title for your post. The body of the post goes after the `---` in either case.
-
-You can also customize the icon that is displayed on the news feed. By default it's `newspaper-o`. We use icons from the [FontAwesome][fa] icon set.
-
-[yfm]: http://jekyllrb.com/docs/frontmatter/
-[fa]: http://fontawesome.io/icons/
-
-
-Projects
---------
-
-To create a project, just create a markdown file in the `_projects` folder. Here are the things you can put in the YAML frontmatter:
-
-- `title:` The project title.
-- `notitle:` Set this to `true` if you don't want a title displayed on the project card. Optional.
-- `description:` The text shown in the project card. It supports markdown.
-- `people:` The people working on the project. This is a list of keys from the `_data/people.yml` file.
-- `layout: project` This sets the layout of the actual project page. It should be set to `project`.
-- `image:` The URL of an image for the project. This is shown on both the project page and the project card. Optional.
-- `last-updated:` Date in the format of `YYYY-MM-DD`. The project cards are sorted by this, most recent first.
-- `status: inactive` Set this to `inactive` if you don't want the project to appear on the front page. Just ignore it otherwise.
-- `link:` Set this to an external URL if this project has a page somewhere else on the web. If you don't have a `link:`, then the content of this markdown file (below the YAML frontmatter) will be this project's page.
-- `no-link: true` Set this if you just don't want a project page for your project.
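-
-For illustration, a minimal project file might look like the sketch below, assuming the fields described above (the filename `_projects/example-project.md`, the people keys, and the image path are placeholders, not content from this repository):
-
-    ---
-    title: "Example Project"
-    layout: project
-    description: "One-line summary shown on the project card. It supports *markdown*."
-    people:
-      - alice
-      - bob
-    image: /img/example-project.png
-    last-updated: 2020-11-30
-    ---
-
-    Since there is no `link:` in the front matter, this body text
-    becomes the content of the project's page.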
-
-
-Personnel
----------
-
-People are listed in a [YAML][] file in `_data/people.yml`. You can list the name, link, bio, and role of each person. Roles (e.g., "Faculty", "Staff", and "Students") are defined in `_config.yml`.
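-
-As a sketch, assuming only the fields just described, an entry in `_data/people.yml` might look like this (the keys, names, and URLs are placeholders):
-
-    alice:
-      name: Alice Example
-      link: https://example.com/~alice
-      bio: Interested in robot learning.
-      role: Students
-    bob:
-      name: Bob Example
-      role: Faculty
-
-The top-level keys (`alice`, `bob`) are what project files reference in their `people:` lists, and each `role` should match one of the roles defined in `_config.yml`.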
-
-[YAML]: https://en.wikipedia.org/wiki/YAML
-
-
-Building
---------
-
-The requirements for building the site are:
-
-* [Jekyll][]: run `gem install jekyll`
-* [bibble][]: available on `pip`
-* ssh and rsync, only if you want to deploy directly.
-
-`make` compiles the bibliography and the website content to the `_site`
-directory. To preview the site, run `jekyll serve` and head to
-http://0.0.0.0:5000.
-
-
-Deploying to Your Server
-------------------------
-
-To set up deployments, edit the Makefile and look for the lines where `HOST` and `DIR` are defined. Change these to the host and directory that your HTML files should be copied to.
-
-To upload a new version of the site via rsync over ssh, type `make deploy`. A web hook does this automatically when you push to GitHub. Be aware that the Makefile is configured to have rsync delete stray files from the destination directory.
-
-[Jekyll]: http://jekyllrb.com/
-[bibble]: https://github.com/sampsyo/bibble/
diff --git a/_bibliography/papers.bib b/_bibliography/papers.bib
deleted file mode 100644
index ecaeab7..0000000
--- a/_bibliography/papers.bib
+++ /dev/null
@@ -1,567 +0,0 @@
-@article{conceptfusion,
- author = {Jatavallabhula, {Krishna Murthy} and Kuwajerwala, Alihusein and Gu, Qiao and Omama, Mohd and Chen, Tao and Li, Shuang and Iyer, Ganesh and Saryazdi, Soroush and Keetha, Nikhil and Tewari, Ayush and Tenenbaum, {Joshua B.} and {de Melo}, {Celso Miguel} and Krishna, Madhava and Paull, Liam and Shkurti, Florian and Torralba, Antonio},
- title = {ConceptFusion: Open-set Multimodal 3D Mapping},
- journal = {Robotics: Science and Systems (RSS)},
- year = {2023},
- image = {papers/conceptfusion.gif},
- arxiv = {https://arxiv.org/abs/2302.07241},
- projectpage = {https://concept-fusion.github.io/},
- video = {https://www.youtube.com/watch?v=rkXgws8fiDs},
- abstract = {
- Building 3D maps of the environment is central to robot navigation, planning, and interaction with objects in a scene. Most existing approaches that integrate semantic concepts with 3D maps largely remain confined to the closed-set setting: they can only reason about a finite set of concepts, pre-defined at training time. Further, these maps can only be queried using class labels, or in recent work, using text prompts.
-We address both these issues with ConceptFusion, a scene representation that is (1) fundamentally open-set, enabling reasoning beyond a closed set of concepts and (2) inherently multimodal, enabling a diverse range of possible queries to the 3D map, from language, to images, to audio, to 3D geometry, all working in concert. ConceptFusion leverages the open-set capabilities of today's foundation models pre-trained on internet-scale data to reason about concepts across modalities such as natural language, images, and audio. We demonstrate that pixel-aligned open-set features can be fused into 3D maps via traditional SLAM and multi-view fusion approaches. This enables effective zero-shot spatial reasoning, not needing any additional training or finetuning, and retains long-tailed concepts better than supervised approaches, outperforming them by a margin of more than 40 percent on 3D IoU. We extensively evaluate ConceptFusion on a number of real-world datasets, simulated home environments, a real-world tabletop manipulation task, and an autonomous driving platform. We showcase new avenues for blending foundation models with 3D open-set multimodal mapping.
- },
-
-}
-
-@inproceedings{quadsoccer,
- author = {Yandong Ji and Zhongyu Li* and Yinan Sun and Xue Bin Peng and Sergey Levine and Glen Berseth and Koushil Sreenath},
- title = {Hierarchical Reinforcement Learning for Precise Soccer Shooting Skills using a Quadrupedal Robot},
- booktitle = {Proc. IEEE/RSJ Intl Conf on Intelligent Robots and Systems (IROS 2022)},
- arxiv = {https://arxiv.org/abs/2208.01160},
- image = {papers/a-reinforcement-learni-1.jpg},
- projectpage = {https://mila.quebec/en/article/anymorph-learning-transferable-policies-by-inferring-agent-morphology/},
- video = {https://www.youtube.com/watch?v=bteipHcJ8BM},
- abstract = {We address the problem of enabling quadrupedal robots to perform precise shooting skills in the real world using reinforcement learning. Developing algorithms to enable a legged robot to shoot a soccer ball to a given target is a challenging problem that combines robot motion control and planning into one task. To solve this problem, we need to consider the dynamics limitations and motion stability during the control of a dynamic legged robot. Moreover, we need to consider motion planning to shoot the hard-to-model deformable ball rolling on the ground with uncertain friction to a desired location. In this paper, we propose a hierarchical framework that leverages deep reinforcement learning to train (a) a robust motion control policy that can track arbitrary motions and (b) a planning policy to decide the desired kicking motion to shoot a soccer ball to a target. We deploy the proposed framework on an A1 quadrupedal robot and enable it to accurately shoot the ball to random targets in the real world.},
- year={2022}
-}
-
-@article{Traboco2022,
- title={AnyMorph: Learning Transferable Policies By Inferring Agent Morphology},
- author={Brandon Trabucco and Mariano Phielipp and Glen Berseth},
- journal = {International Conference on Machine Learning},
- arxiv = {https://arxiv.org/abs/2206.12279},
- image = {papers/anymorph_prompt.gif},
- projectpage = {https://mila.quebec/en/article/anymorph-learning-transferable-policies-by-inferring-agent-morphology/},
- abstract = {The prototypical approach to reinforcement learning involves training policies tailored to a particular agent from scratch for every new morphology. Recent work aims to eliminate the re-training of policies by investigating whether a morphology-agnostic policy, trained on a diverse set of agents with similar task objectives, can be transferred to new agents with unseen morphologies without re-training. This is a challenging problem that required previous approaches to use hand-designed descriptions of the new agent's morphology.
Instead of hand-designing this description, we propose a data-driven method that learns a representation of morphology directly from the reinforcement learning objective. Ours is the first reinforcement learning algorithm that can train a policy to generalize to new agent morphologies without requiring a description of the agent's morphology in advance. We evaluate our approach on the standard benchmark for agent-agnostic control, and improve over the current state of the art in zero-shot generalization to new agents. Importantly, our method attains good performance without an explicit description of morphology.},
- year={2022}
-}
-
-@inproceedings{paull2016unified,
- title={A Unified Resource-Constrained Framework for Graph SLAM},
- author={Paull, Liam and Huang, Guoquan and Leonard, John J},
- booktitle={IEEE International Conference on Robotics and Automation (ICRA)},
- pages={1--8},
- month={May},
- year={2016},
- image = {papers/paull2016unified.png},
- arxiv = {http://liampaull.ca/publications/Paull_ICRA_2016.pdf},
- code = {https://github.com/liampaull/Resource_Constrained},
- slides = {http://liampaull.ca/publications/Paull_ICRA_2016_presentation.pptx},
- poster = {http://liampaull.ca/publications/Paull_ICRA_2016_poster.pptx},
- abstract = {Graphical methods have proven an extremely useful tool employed by the mobile robotics community to frame estimation problems. Incremental solvers are able to process incoming sensor data and produce maximum a posteriori (MAP) estimates in real time by exploiting the natural sparsity within the graph for reasonable-sized problems. However, enabling truly long-term operation in previously unknown environments requires algorithms whose computation, memory, and bandwidth (in the case of distributed systems) requirements scale constantly with time and environment size. Some recent approaches have addressed this problem through a two-step process: first, the variables selected for removal are marginalized, which induces density, and then the result is sparsified to maintain computational efficiency. Previous literature generally addresses only one of these two components. In this work, we attempt to explicitly connect all of the aforementioned resource-constraint requirements by considering the node removal and sparsification pipeline in its entirety. We formulate the node selection problem as a minimization problem over the penalty to be paid in the resulting sparsification. As a result, we produce node subset selection strategies that are optimal in terms of minimizing the impact, in terms of Kullback-Leibler divergence (KLD), of approximating the dense distribution by a sparse one. We then show that one instantiation of this problem yields a computationally tractable formulation. Finally, we evaluate the method on standard datasets and show that the KLD is minimized as compared to other commonly used heuristic node selection techniques.},
-}
-
-@inproceedings{mu2016iros,
- title={SLAM with objects using a nonparametric pose graph},
- author={Mu, Beipeng and Liu, Shih-Yuan and Paull, Liam and Leonard, John and How, Jonathan P},
- booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
- year={2016},
- month={Oct},
- image = {papers/mu2016iros.png},
- arxiv = {1704.05959},
- video = {https://www.youtube.com/watch?v=gOwMiFlj8KU},
- abstract = {Mapping and self-localization in unknown environments are fundamental capabilities in many robotic applications.
These tasks typically involve the identification of objects as unique features or landmarks, which requires the objects both to be detected and then assigned a unique identifier that can be maintained when viewed from different perspectives and in different images. The data association and simultaneous localization and mapping (SLAM) problems are, individually, well studied in the literature, but these two problems are inherently tightly coupled, and that coupling has not been well addressed. Without accurate SLAM, possible data associations are combinatorial and easily become intractable. Without accurate data association, the error of SLAM algorithms diverges easily. This paper proposes a novel nonparametric pose graph that models data association and SLAM in a single framework. An algorithm is further introduced to alternate between inferring data association and performing SLAM. Experimental results show that our approach has the new capability of associating object detections and localizing objects at the same time, leading to significantly better performance on both the data association and SLAM problems than achieved by considering only one and ignoring imperfections in the other.},
-}
-
-@inproceedings{paull2017duckietown,
- title={Duckietown: an open, inexpensive and flexible platform for autonomy education and research},
- author={Paull, Liam and Tani, Jacopo and Ahn, Heejin and Alonso-Mora, Javier and Carlone, Luca and Cap, Michal and Chen, Yu Fan and Choi, Changhyun and Dusek, Jeff and Fang, Yajun and others},
- booktitle={IEEE International Conference on Robotics and Automation (ICRA)},
- pages={1497--1504},
- year={2017},
- month={May},
- image = {papers/paull2017duckietown.png},
- arxiv = {http://www.mit.edu/~hangzhao/papers/duckietown.pdf},
- abstract = {Duckietown is an open, inexpensive and flexible platform for autonomy education and research. The platform comprises small autonomous vehicles (“Duckiebots”) built from off-the-shelf components, and cities (“Duckietowns”) complete with roads, signage, traffic lights, obstacles, and citizens (duckies) in need of transportation. The Duckietown platform offers a wide range of functionalities at a low cost. Duckiebots sense the world with only one monocular camera and perform all processing onboard with a Raspberry Pi 2, yet are able to: follow lanes while avoiding obstacles, pedestrians (duckies) and other Duckiebots, localize within a global map, navigate a city, and coordinate with other Duckiebots to avoid collisions. Duckietown is a useful tool since educators and researchers can save money and time by not having to develop all of the necessary supporting infrastructure and capabilities. All materials are available as open source, and the hope is that others in the community will adopt the platform for education and research.},
-}
-
-@inproceedings{rosman2017hybrid,
- title={Hybrid control and learning with coresets for autonomous vehicles},
- author={Rosman, Guy and Paull, Liam and Rus, Daniela},
- booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
- pages={6894--6901},
- year={2017},
- month={Oct},
- arxiv = {http://people.csail.mit.edu/rosman/papers/ctrl_embedding.pdf},
- image = {papers/rosman2017hybrid.png},
- abstract = {Modern autonomous systems such as driverless vehicles need to safely operate in a wide range of conditions. A potential solution is to employ a hybrid systems approach, where safety is guaranteed in each individual mode within the system.
This shifts complexity and responsibility away from the individual controllers and onto the problem of determining discrete mode transitions. In this work we propose an efficient framework based on recursive neural networks and coreset data summarization to learn the transitions between an arbitrary number of controller modes that can have arbitrary complexity. Our approach allows us to efficiently gather annotation data from the large-scale datasets that are required to train such hybrid nonlinear systems to be safe under all operating conditions, favoring underexplored parts of the data. We demonstrate the construction of the embedding, and efficient detection of switching points for autonomous and non-autonomous car data. We further show how our approach enables efficient sampling of training data, to further improve either our embedding or the controllers.},
-}
-
-@article{paull2018probabilistic,
- title={Probabilistic cooperative mobile robot area coverage and its application to autonomous seabed mapping},
- author={Paull, Liam and Seto, Mae and Leonard, John J and Li, Howard},
- journal={The International Journal of Robotics Research},
- volume={37},
- number={1},
- pages={21--45},
- year={2018},
- image = {papers/paull2018coverage.png},
- abstract = {There are many applications that require mobile robots to autonomously cover an entire area with a sensor or end effector. The vast majority of the literature on this subject is focused on addressing path planning for area coverage under the assumption that the robot’s pose is known or that error is bounded. In this work, we remove this assumption and develop a completely probabilistic representation of coverage. We show that coverage is guaranteed as long as the robot pose estimates are consistent, a much milder assumption than zero or bounded error. After formally connecting robot sensor uncertainty with area coverage, we propose an adaptive sliding window filter pose estimator that provides a close approximation to the full maximum a posteriori estimate with a computation cost that is bounded over time. Subsequently, an adaptive planning strategy is presented that automatically exploits conditions of low vehicle uncertainty to more efficiently cover an area. We further extend this approach to the multi-robot case where robots can communicate through a (possibly faulty and low-bandwidth) channel and make relative measurements of one another. In this case, area coverage is achieved more quickly since the uncertainty over the robots’ trajectories is reduced. We apply the framework to the scenario of mapping an area of seabed with an autonomous underwater vehicle. Experimental results support the claim that our method achieves guaranteed complete coverage notwithstanding poor navigational sensors and that the resulting path lengths required to cover the entire area are shortest using the proposed cooperative and adaptive approach.},
-}
-
-@inproceedings{ort2018icra,
- title={Autonomous Vehicle Navigation in Rural Environments without Detailed Prior Maps},
- author={Ort, Teddy and Paull, Liam and Rus, Daniela},
- booktitle={IEEE International Conference on Robotics and Automation (ICRA)},
- year={2018},
- month={May},
- image = {papers/ort2018icra.png},
- arxiv = {https://toyota.csail.mit.edu/sites/default/files/documents/papers/ICRA2018_AutonomousVehicleNavigationRuralEnvironment.pdf},
- abstract = {State-of-the-art autonomous driving systems rely heavily on detailed and highly accurate prior maps.
However, outside of small urban areas, it is very challenging to build, store, and transmit detailed maps since the spatial scales are so large. Furthermore, maintaining detailed maps of large rural areas can be impracticable due to the rapid rate at which these environments can change. This is a significant limitation for the widespread applicability of autonomous driving technology, which has the potential for an incredibly positive societal impact. In this paper, we address the problem of autonomous navigation in rural environments through a novel mapless driving framework that combines sparse topological maps for global navigation with a sensor-based perception system for local navigation. First, a local navigation goal within the sensor view of the vehicle is chosen as a waypoint leading towards the global goal. Next, the local perception system generates a feasible trajectory in the vehicle frame to reach the waypoint while abiding by the rules of the road for the segment being traversed. These trajectories are updated to remain in the local frame using the vehicle’s odometry and the associated uncertainty based on the least-squares residual and a recursive filtering approach, which allows the vehicle to navigate road networks reliably, and at high speed, without detailed prior maps. We demonstrate the performance of the system on a full-scale autonomous vehicle navigating in a challenging rural environment and benchmark the system on a large amount of collected data.},
-}
-
-@article{mai2018local,
- title={Local Positioning System Using UWB Range Measurements for an Unmanned Blimp},
- author={Mai, Vincent and Kamel, Mina and Krebs, Matthias and Schaffner, Andreas and Meier, Daniel and Paull, Liam and Siegwart, Roland},
- journal={IEEE Robotics and Automation Letters},
- volume={3},
- number={4},
- pages={2971--2978},
- year={2018},
- month={October},
- image = {papers/mai2018blimp.png},
- arxiv = {https://ieeexplore.ieee.org/document/8392389},
- abstract = {Unmanned blimps are a safe and reliable alternative to conventional drones when flying above people. On-board real-time tracking of their pose and velocities is a necessary step toward autonomous navigation. There is a need for an easily deployable technology that is able to accurately and robustly estimate the pose and velocities of a blimp in 6 DOF, as well as unexpected applied forces and torques, in an uncontrolled environment. We present two multiplicative extended Kalman filters using ultrawideband radio sensors and a gyroscope to address this challenge. One filter is updated using a dynamics model of the blimp, whereas the other uses a constant-speed model. We describe a set of experiments in which these estimators have been implemented on an embedded flight controller. They were tested and compared in accuracy and robustness in a hardware-in-the-loop simulation as well as on a real blimp.
This approach can be generalized to any lighter-than-air robot to track it with the necessary accuracy, precision, and robustness to allow autonomous navigation.},
-}
-
-@inproceedings{CTCNet,
- title = {Geometric Consistency for Self-Supervised End-to-End Visual Odometry},
- author = {Iyer, Ganesh and Murthy, {J Krishna} and Gupta, Gunshi and Krishna, {K Madhava} and Paull, Liam},
- booktitle = {CVPR Workshop on Deep Learning for Visual SLAM},
- month = {June},
- year = {2018},
- arxiv = {1804.03789},
- projectpage = {https://krrish94.github.io/CTCNet-release/},
- image = {papers/ctcnet.png},
- abstract = {With the success of deep learning based approaches in tackling challenging problems in computer vision, a wide range of deep architectures have recently been proposed for the task of visual odometry (VO) estimation. Most of these proposed solutions rely on supervision, which requires the acquisition of precise ground-truth camera pose information, collected using expensive motion capture systems or high-precision IMU/GPS sensor rigs. In this work, we propose an unsupervised paradigm for deep visual odometry learning. We show that using a noisy teacher, which could be a standard VO pipeline, and by designing a loss term that enforces geometric consistency of the trajectory, we can train accurate deep models for VO that do not require ground-truth labels. We leverage geometry as a self-supervisory signal and propose "Composite Transformation Constraints (CTCs)", that automatically generate supervisory signals for training and enforce geometric consistency in the VO estimate. We also present a method of characterizing the uncertainty in VO estimates thus obtained. To evaluate our VO pipeline, we present exhaustive ablation studies that demonstrate the efficacy of end-to-end, self-supervised methodologies to train deep models for monocular VO. We show that leveraging concepts from geometry and incorporating them into the training of a recurrent neural network results in performance competitive to supervised deep VO methods.},
-}
-
-@inproceedings{amini2018learning,
- title={Learning steering bounds for parallel autonomous systems},
- author={Amini, Alexander and Paull, Liam and Balch, Thomas and Karaman, Sertac and Rus, Daniela},
- booktitle={IEEE International Conference on Robotics and Automation (ICRA)},
- year={2018},
- month={May},
- arxiv = {https://dspace.mit.edu/handle/1721.1/117632},
- image = {papers/amini2018parallel.png},
- abstract = {Deep learning has been successfully applied to “end-to-end” learning of the autonomous driving task, where a deep neural network learns to predict steering control commands from camera data input. While these previous works support reactionary control, the representation learned is not usable for higher-level decision making required for autonomous navigation. This paper tackles the problem of learning a representation to predict a continuous control probability distribution, and thus steering control options and bounds for those options, which can be used for autonomous navigation. Each mode of the distribution encodes a possible macro-action that the system could execute at that instant, and the covariances of the modes place bounds on safe steering control values. Our approach has the added advantage of being trained on unlabeled data collected from inexpensive cameras.
The deep neural network based algorithm generates a probability distribution over the space of steering angles, from which we leverage Variational Bayesian methods to extract a mixture model and compute the different possible actions in the environment. A bound, which the autonomous vehicle must respect in our parallel autonomy setting, is then computed for each of these actions. We evaluate our approach on a challenging dataset containing a wide variety of driving conditions, and show that our algorithm is capable of parameterizing Gaussian Mixture Models for possible actions and extracting steering bounds with a mean error of only 2 degrees. Additionally, we demonstrate our system working on a full-scale autonomous vehicle and evaluate its ability to successfully handle various parallel autonomy situations.},
-}
-
-@inproceedings{bharadhwaj2018data,
- title={A Data-Efficient Framework for Training and Sim-to-Real Transfer of Navigation Policies},
- author={Bharadhwaj, Homanga and Wang, Zihan and Bengio, Yoshua and Paull, Liam},
- booktitle={IEEE International Conference on Robotics and Automation (ICRA)},
- year={2019},
- month={May},
- arxiv = {1810.04871},
- image = {papers/homanga2019icra.png},
- abstract = {Learning effective visuomotor policies for robots purely from data is challenging, but also appealing since a learning-based system should not require manual tuning or calibration. In the case of a robot operating in a real environment, the training process can be costly, time-consuming, and even dangerous since failures are common at the start of training. For this reason, it is desirable to be able to leverage \textit{simulation} and \textit{off-policy} data to the extent possible to train the robot. In this work, we introduce a robust framework that plans in simulation and transfers well to the real environment. Our model incorporates a gradient-descent based planning module, which, given the initial image and goal image, encodes the images to a lower dimensional latent state and plans a trajectory to reach the goal. The model, consisting of the encoder and planner modules, is trained through a meta-learning strategy in simulation first. We subsequently perform adversarial domain transfer on the encoder by using a bank of unlabelled but random images from the simulation and real environments to enable the encoder to map images from the real and simulated environments to a similarly distributed latent representation. By fine-tuning the entire model (encoder + planner) with far fewer real-world expert demonstrations, we show successful planning performance in different navigation tasks.},
-}
-
-@article{sai2019dal,
- title = {Deep Active Localization},
- author = {Krishna, Sai and Seo, Keehong and Bhatt, Dhaivat and Mai, Vincent and Murthy, Krishna and Paull, Liam},
- journal = {IEEE Robotics and Automation Letters (RAL)},
- year = {2019},
- month = {May},
- arxiv = {1903.01669},
- code = {https://github.com/montrealrobotics/dal},
- image = {papers/dal.png},
- abstract = {Active localization is the problem of generating robot actions that allow it to maximally disambiguate its pose within a reference map. Traditional approaches to this use an information-theoretic criterion for action selection and hand-crafted perceptual models. In this work we propose an end-to-end differentiable method for learning to take informative actions that is trainable entirely in simulation and then transferable to real robot hardware with zero refinement.
The system is composed of two modules: a convolutional neural network for perception, and a deep reinforcement learned planning module. We introduce a multi-scale approach to the learned perceptual model since the accuracy needed to perform action selection with reinforcement learning is much less than the accuracy needed for robot control. We demonstrate that the resulting system outperforms the traditional approach for either perception or planning. We also demonstrate our approach's robustness to different map configurations and other nuisance parameters through the use of domain randomization in training. The code is also compatible with the OpenAI gym framework, as well as the Gazebo simulator.},
-}
-
-@inproceedings{adr,
- title = {Active Domain Randomization},
- author = {Mehta, Bhairav and Diaz, Manfred and Golemo, Florian and Pal, Christopher and Paull, Liam},
- booktitle = {Conference on Robot Learning (CoRL)},
- year = {2019},
- arxiv = {1904.04762},
- code = {https://github.com/montrealrobotics/active-domainrand},
- image = {papers/adr.gif},
- abstract = {We tackle the uniform sampling assumption in domain randomization and learn a randomization strategy, looking for the most informative environments. Our method shows significant improvements in agent performance, agent generalization, sample complexity, and interpretability over the traditional domain and dynamics randomization strategies.},
-}
-
-@inproceedings{gradslam,
- title = {gradSLAM: Dense SLAM meets automatic differentiation},
- author = {{Krishna Murthy}, Jatavallabhula and Iyer, Ganesh and Paull, Liam},
- booktitle = {International Conference on Robotics and Automation (ICRA)},
- year={2020},
- arxiv = {1910.10672},
- projectpage = {https://gradslam.github.io},
- code = {https://github.com/gradslam/gradslam},
- image = {papers/gradslam.png},
- abstract = {The question of "representation" is central in the context of dense simultaneous localization and mapping (SLAM). Newer learning-based approaches have the potential to leverage data or task performance to directly inform the choice of representation. However, learning representations for SLAM has been an open question, because traditional SLAM systems are not end-to-end differentiable. In this work, we present gradSLAM, a differentiable computational graph take on SLAM. Leveraging the automatic differentiation capabilities of computational graphs, gradSLAM enables the design of SLAM systems that allow for gradient-based learning across each of their components, or the system as a whole. This is achieved by creating differentiable alternatives for each non-differentiable component in a typical dense SLAM system. Specifically, we demonstrate how to design differentiable trust-region optimizers, surface measurement and fusion schemes, as well as differentiate over rays, without sacrificing performance.
This amalgamation of dense SLAM with computational graphs enables us to backprop all the way from 3D maps to 2D pixels, opening up new possibilities in gradient-based learning for SLAM.},
-}
-
-@inproceedings{lamaml,
- title = {La-MAML: Look-ahead Meta Learning for Continual Learning},
- author = {Gupta, Gunshi and Yadav, Karmesh and Paull, Liam},
- booktitle = {Neural Information Processing Systems (NeurIPS)},
- highlight = {Oral (top 1.1%)},
- year = {2020},
- arxiv = {2007.13904},
- projectpage = {https://mila.quebec/en/article/la-maml-look-ahead-meta-learning-for-continual-learning/},
- image = {papers/lamaml.png},
- abstract = {The continual learning problem involves training models with limited capacity to perform well on a set of an unknown number of sequentially arriving tasks. While meta-learning shows great potential for reducing interference between old and new tasks, the current training procedures tend to be either slow or offline, and sensitive to many hyper-parameters. In this work, we propose Look-ahead MAML (La-MAML), a fast optimisation-based meta-learning algorithm for online continual learning, aided by a small episodic memory. Our proposed modulation of per-parameter learning rates in our meta-learning update allows us to draw connections to prior work on hypergradients and meta-descent. This provides a more flexible and efficient way to mitigate catastrophic forgetting compared to conventional prior-based methods. La-MAML achieves performance superior to other replay-based, prior-based and meta-learning based approaches for continual learning on real-world visual classification benchmarks.},
-}
-
-@inproceedings{che2020neurips,
- title = {Your GAN is Secretly an Energy-based Model and You Should Use Discriminator Driven Latent Sampling},
- author = {Che, Tong and Zhang, Ruixiang and Sohl-Dickstein, Jascha and Larochelle, Hugo and Paull, Liam and Cao, Yuan and Bengio, Yoshua},
- booktitle = {Neural Information Processing Systems (NeurIPS)},
- year = {2020},
- image = {papers/gan.png},
- arxiv = {2003.06060},
- abstract = {We show that the sum of the implicit generator log-density of a GAN with the logit score of the discriminator defines an energy function which yields the true data density when the generator is imperfect but the discriminator is optimal, thus making it possible to improve on the typical generator. To make that practical, we show that sampling from this modified density can be achieved by sampling in latent space according to an energy-based model induced by the sum of the latent prior log-density and the discriminator output score. This can be achieved by running a Langevin MCMC in latent space and then applying the generator function, which we call Discriminator Driven Latent Sampling~(DDLS). We show that DDLS is highly efficient compared to previous methods which work in the high-dimensional pixel space and can be applied to improve on previously trained GANs of many types. We evaluate DDLS on both synthetic and real-world datasets qualitatively and quantitatively. On CIFAR-10, DDLS substantially improves the Inception Score of an off-the-shelf pre-trained SN-GAN from 8.22 to 9.09, which is even comparable to the class-conditional BigGAN model.
This achieves a new state of the art in the unconditional image synthesis setting without introducing extra parameters or additional training.},
-}
-
-@inproceedings{mehta2020curriculum,
- title = {Curriculum in Gradient-Based Meta-Reinforcement Learning},
- author = {Mehta, Bhairav and Deleu, Tristan and Raparthy, {Sharath Chandra} and Pal, Christopher and Paull, Liam},
- booktitle = {BETR-RL Workshop},
- year = {2020},
- arxiv = {2002.07956},
- image = {papers/mehta2020curriculum.png},
- abstract = {Can meta-RL use curriculum learning? In this work, we explore that question and find that curriculum learning stabilizes meta-RL in complex navigation and locomotion tasks. We also highlight issues with meta-RL benchmarks by highlighting failure cases when we vary task distributions.},
-}
-
-@inproceedings{ssadr,
- title = {Generating Automatic Curricula via Self-Supervised Active Domain Randomization},
- author = {Raparthy, {Sharath Chandra} and Mehta, Bhairav and Paull, Liam},
- booktitle = {BETR-RL Workshop},
- year = {2020},
- arxiv = {2002.07911},
- code = {https://github.com/montrealrobotics/unsupervised-adr},
- image = {papers/ssadr.png},
- abstract = {Can you learn domain randomization curricula with no rewards? We show that agents trained via self-play in the ADR framework outperform uniform domain randomization by orders of magnitude in both simulated and real-world transfer.},
-}
-
-@inproceedings{aido2018,
- title = {The AI Driving Olympics at NeurIPS 2018},
- author = {Zilly, Julian and Tani, Jacopo and Considine, Breandan and Mehta, Bhairav and Daniele, {Andrea F} and Diaz, Manfred and Bernasconi, Gianmarco and Ruch, Claudio and Hakenberg, Jan and Golemo, Florian and Bowser, {A Kirsten} and Walter, {Matthew R} and Hristov, Ruslan and Mallya, Sunil and Frazzoli, Emilio and Censi, Andrea and Paull, Liam},
- booktitle = {Springer},
- year = {2020},
- arxiv = {1903.02503},
- image = {papers/aido18.png},
- abstract = {Despite recent breakthroughs, the ability of deep learning and reinforcement learning to outperform traditional approaches to control physically embodied robotic agents remains largely unproven. To help bridge this gap, we created the “AI Driving Olympics” (AI-DO), a competition with the objective of evaluating the state of the art in machine learning and artificial intelligence for mobile robotics. Based on the simple and well-specified autonomous driving and navigation environment called “Duckietown,” AI-DO includes a series of tasks of increasing complexity – from simple lane-following to fleet management. For each task, we provide tools for competitors to use in the form of simulators, logs, code templates, baseline implementations and low-cost access to robotic hardware. We evaluate submissions in simulation online, on standardized hardware environments, and finally at the competition event. The first AI-DO, AI-DO 1, occurred at the Neural Information Processing Systems (NeurIPS) conference in December 2018.
The results of AI-DO 1 highlight the need for better benchmarks, which are lacking in robotics, as well as improved mechanisms to bridge the gap between simulation and reality.},
-}
-
-@inproceedings{probod,
- title = {Probabilistic Object Detection: Strengths, Weaknesses, and Opportunities},
- author = {Bhatt, Dhaivat and Bansal, Dishank and Gupta, Gunshi and Jatavallabhula, {Krishna Murthy} and Lee, Hanju and Paull, Liam},
- booktitle = {ICML Workshop on AI for Autonomous Driving},
- year = {2020},
- projectpage = {https://gunshigupta.netlify.app/publication/probod/},
- image = {papers/probod.png},
- abstract = {Deep neural networks are the de-facto standard for object detection in autonomous driving applications. However, neural networks cannot be blindly trusted even within the training data distribution, let alone outside it. This has paved the way for several probabilistic object detection techniques that measure uncertainty in the outputs of an object detector. Through this position paper, we serve three main purposes. First, we briefly sketch the landscape of current methods for probabilistic object detection. Second, we present the main shortcomings of these approaches. Finally, we present promising avenues for future research, and proof-of-concept results where applicable. Through this effort, we hope to bring the community one step closer to performing accurate, reliable, and consistent probabilistic object detection.},
-}
-
-
-@inproceedings{biv,
- title = {Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression},
- author = {Mai, Vincent and Khamies, Waleed and Paull, Liam},
- booktitle = {ICML Workshop on Uncertainty \& Robustness in Deep Learning},
- year = {2021},
- arxiv = {2107.04497},
- image = {papers/biv.png},
- abstract = {In the supervised learning task of heteroscedastic regression, each label is subject to noise from a different distribution. The label generator may estimate the variance of the noise distribution for each label, which is useful information to mitigate its impact. We adapt an inverse-variance weighted mean square error, based on the Gauss-Markov theorem, for gradient descent on neural networks. We introduce Batch Inverse-Variance, a loss function which is robust to near-ground-truth samples and allows control of the effective learning rate. Our experimental results show that BIV significantly improves the performance of the networks on two noisy datasets, compared to L2 loss, inverse-variance weighting, and a filtering-based baseline.},
-}
-
-@inproceedings{loco,
- title = {LOCO: Adaptive exploration in reinforcement learning via local estimation of contraction coefficients},
- author = {Diaz, Manfred and Paull, Liam and Castro, {Pablo Samuel}},
- booktitle = {Self-Supervision for Reinforcement Learning Workshop, ICLR 2021},
- year = {2021},
- openreview = {SmvsysIyHW-},
- image = {papers/loco.png},
- pdf = {diaz_loco.pdf},
- abstract = {We offer a novel approach to balancing exploration and exploitation in reinforcement learning (RL). To do so, we characterize an environment’s exploration difficulty via the Second Largest Eigenvalue Modulus (SLEM) of the Markov chain induced by uniform stochastic behaviour. Specifically, we investigate the connection of state-space coverage with the SLEM of this Markov chain and use the theory of contraction coefficients to derive estimates of this eigenvalue of interest. Furthermore, we introduce a method for estimating the contraction coefficients on a local level and leverage it to design a novel exploration algorithm.
We evaluate our algorithm on a series of GridWorld tasks of varying sizes and complexity.}, -} - - -@inproceedings{schwarting2017parallel, - title={Parallel autonomy in automated vehicles: Safe motion generation with minimal intervention}, - author={Schwarting, Wilko and Alonso-Mora, Javier and Paull, Liam and Karaman, Sertac and Rus, Daniela}, - booktitle={Robotics and Automation (ICRA), 2017 IEEE International Conference on}, - pages={1928--1935}, - pdf = {schwarting_ICRA_2017.pdf}, - image = {papers/parallel_autonomy_ICRA.png}, - abstract = {Current state-of-the-art vehicle safety systems, such as assistive braking or automatic lane following, are still only able to help in relatively simple driving situations. We introduce a Parallel Autonomy shared-control framework that produces safe trajectories based on human inputs even in much more complex driving scenarios, such as those commonly encountered in an urban setting. We minimize the deviation from the human inputs while ensuring safety via a set of collision avoidance constraints. We develop a receding horizon planner formulated as a Non-linear Model Predictive Control (NMPC) including analytic descriptions of road boundaries, and the configurations and future uncertainties of other traffic participants, and directly supplying them to the optimizer without linearization. The NMPC operates over both steering and acceleration simultaneously. Furthermore, the proposed receding horizon planner also applies to fully autonomous vehicles. We validate the proposed approach through simulations in a wide variety of complex driving scenarios such as left-turns across traffic, passing on busy streets, and under dynamic constraints in sharp turns on a race track.}, - year={2017}, - organization={IEEE} -} - -@inproceedings{naser2017parallel, - title={A parallel autonomy research platform}, - author={Naser, Felix and Dorhout, David and Proulx, Stephen and Pendleton, Scott Drew and Andersen, Hans and Schwarting, Wilko and Paull, Liam and Alonso-Mora, Javier and Ang, Marcelo H and Karaman, Sertac and others}, - booktitle={2017 IEEE Intelligent Vehicles Symposium (IV)}, - pages={933--940}, - year={2017}, - abstract={We present the development of a full-scale “parallel autonomy” research platform including software and hardware. In the parallel autonomy paradigm, the control of the vehicle is shared; the human is still in control of the vehicle, but the autonomy system is always running in the background to prevent accidents. Our holistic approach includes: (1) a drive-by-wire conversion method only based on reverse engineering, (2) mounting of relatively inexpensive sensors onto the vehicle, (3) implementation of a localization and mapping system, (4) obstacle detection and (5) a shared controller as well as (6) integration with an advanced autonomy simulation system (Drake) for rapid development and testing. The system can operate in three modes: (a) manual driving, (b) full autonomy, where the system is in complete control of the vehicle and (c) parallel autonomy, where the shared controller is implemented.
We present results from extensive testing of a full-scale vehicle on closed tracks that demonstrate these capabilities.}, - pdf = {naser_IV_2017.pdf}, - image = {papers/prius.png}, - organization={IEEE} -} - -@article{schwarting2017safe, - title={Safe nonlinear trajectory generation for parallel autonomy with a dynamic vehicle model}, - author={Schwarting, Wilko and Alonso-Mora, Javier and Paull, Liam and Karaman, Sertac and Rus, Daniela}, - journal={IEEE Transactions on Intelligent Transportation Systems}, - volume={19}, - number={9}, - pages={2994--3008}, - year={2018}, - abstract={High-end vehicles are already equipped with safety systems, such as assistive braking and automatic lane following, enhancing vehicle safety. Yet, these current solutions can only help in low-complexity driving situations. In this paper, we introduce a parallel autonomy, or shared control, framework that computes safe trajectories for an automated vehicle, based on human inputs. We minimize the deviation from the human inputs while ensuring safety via a set of collision avoidance constraints. Our method achieves safe motion even in complex driving scenarios, such as those commonly encountered in an urban setting. We introduce a receding horizon planner formulated as nonlinear model predictive control (NMPC), which includes the analytic descriptions of road boundaries and the configuration and future uncertainties of other road participants. The NMPC operates over both steering and acceleration simultaneously. We introduce a nonslip model suitable for handling complex environments with dynamic obstacles, and a nonlinear combined slip vehicle model including normal load transfer capable of handling static environments. We validate the proposed approach in two complex driving scenarios. First, in an urban environment that includes a left-turn across traffic and passing on a busy street. And second, under snow conditions on a race track with sharp turns and under complex dynamic constraints. We evaluate the performance of the method with various human driving styles. We consequently observe that the method successfully avoids collisions and generates motions with minimal intervention for parallel autonomy. We note that the method can also be applied to generate safe motion for fully autonomous vehicles.}, - pdf={schwarting_TITS_2018.pdf}, - image={papers/parallel_autonomy_TITS.png}, - publisher={IEEE} -} - -@article{ort2020maplite, - title={MapLite: Autonomous Intersection Navigation without a Detailed Prior Map}, - author={Ort, Teddy and Murthy, Krishna and Banerjee, Rohan and Gottipati, Sai Krishna and Bhatt, Dhaivat and Gilitschenski, Igor and Paull, Liam and Rus, Daniela}, - journal={IEEE Robotics and Automation Letters}, - abstract={In this work, we present MapLite: a one-click -autonomous navigation system capable of piloting a vehicle to an -arbitrary desired destination point given only a sparse publicly -available topometric map (from OpenStreetMap). The onboard -sensors are used to segment the road region and register the -topometric map in order to fuse the high-level navigation goals -with a variational path planner in the vehicle frame. This enables -the system to plan trajectories that correctly navigate road -intersections without the use of an external localization system -such as GPS or a detailed prior map. Since the topometric -maps already exist for the vast majority of roads, this solution -greatly increases the geographical scope for autonomous mobility -solutions.
We implement MapLite on a full-scale autonomous -vehicle and exhaustively test it on over 15km of road including -over 100 autonomous intersection traversals. We further extend -these results through simulated testing to validate the system on -complex road junction topologies such as traffic circles.}, - image={papers/maplite_RAL.png}, - pdf={MapLite_RAL.pdf}, - year={2020} -} - -@inproceedings{tani2020integrated, - title={Integrated benchmarking and design for reproducible and accessible evaluation of robotic agents}, - author={Tani, Jacopo and Daniele, Andrea F and Bernasconi, Gianmarco and Camus, Amaury and Petrov, Aleksandar and Courchesne, Anthony and Mehta, Bhairav and Suri, Rohit and Zaluska, Tomasz and Walter, Matthew R and others}, - booktitle={2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)}, - pages={6229--6236}, - year={2020}, - arxiv={2009.04362}, - abstract={As robotics matures and increases in complexity, -it is more necessary than ever that robot autonomy research -be reproducible. Compared to other sciences, there are specific -challenges to benchmarking autonomy, such as the complexity -of the software stacks, the variability of the hardware and -the reliance on data-driven techniques, amongst others. In this -paper, we describe a new concept for reproducible robotics -research that integrates development and benchmarking, so -that reproducibility is obtained “by design” from the beginning -of the research/development processes. We first provide the -overall conceptual objectives to achieve this goal and then a -concrete instance that we have built: the DUCKIENet. One -of the central components of this setup is the Duckietown -Autolab, a remotely accessible standardized setup that is itself -also relatively low-cost and reproducible. When evaluating -agents, careful definition of interfaces allows users to choose -among local versus remote evaluation using simulation, logs, -or remote automated hardware setups. We validate the system -by analyzing the repeatability of experiments conducted using -the infrastructure and show that there is low variance across -different robot hardware and across different remote labs.}, - image={papers/autolab.png}, - organization={IEEE} -} - -@inproceedings{zhang2020perceptual, - title={Perceptual generative autoencoders}, - author={Zhang, Zijun and Zhang, Ruixiang and Li, Zongpeng and Bengio, Yoshua and Paull, Liam}, - booktitle={International Conference on Machine Learning}, - pages={11298--11306}, - year={2020}, - arxiv={1906.10335}, - abstract={Modern generative models are usually designed to match target distributions directly in the data space, where the intrinsic dimension of data can be much lower than the ambient dimension. We argue that this discrepancy may contribute to the difficulties in training generative models. We therefore propose to map both the generated and target distributions to a latent space using the encoder of a standard autoencoder, and train the generator (or decoder) to match the target distribution in the latent space. Specifically, we enforce the consistency in both the data space and the latent space with theoretically justified data and latent reconstruction losses. The resulting generative model, which we call a perceptual generative autoencoder (PGA), is then trained with a maximum likelihood or variational autoencoder (VAE) objective. 
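As a rough illustration of the two consistency terms just described in the zhang2020perceptual entry, the sketch below combines a data-space reconstruction loss with a latent-space reconstruction loss on generated samples. The linear encoder/decoder, shapes, and the unweighted sum are placeholder assumptions, not the paper's exact objective.

```python
import torch
import torch.nn as nn

enc = nn.Linear(8, 2)            # placeholder encoder to the latent space
dec = nn.Linear(2, 8)            # placeholder decoder / generator

x = torch.randn(16, 8)           # data batch
z_prior = torch.randn(16, 2)     # samples from the latent prior

z = enc(x)
loss_data = ((dec(z) - x) ** 2).mean()                     # data-space consistency
loss_latent = ((enc(dec(z_prior)) - z_prior) ** 2).mean()  # latent-space consistency
(loss_data + loss_latent).backward()
```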
With maximum likelihood, PGAs generalize the idea of reversible generative models to unrestricted neural network architectures and an arbitrary number of latent dimensions. When combined with VAEs, PGAs substantially improve over the baseline VAEs in terms of sample quality. Compared to other autoencoder-based generative models using simple priors, PGAs achieve state-of-the-art FID scores on CIFAR-10 and CelebA.}, - image={papers/pga.png}, - organization={PMLR} -} - -@inproceedings{liu2021orthogonal, - title={Orthogonal over-parameterized training}, - author={Liu, Weiyang and Lin, Rongmei and Liu, Zhen and Rehg, James M and Paull, Liam and Xiong, Li and Song, Le and Weller, Adrian}, - booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, - pages={7251--7260}, - arxiv={2004.04690}, - abstract={The inductive bias of a neural network is largely determined by the architecture and the training algorithm. To achieve good generalization, how to effectively train a neural network is of great importance. We propose a novel orthogonal over-parameterized training (OPT) framework that can provably minimize the hyperspherical energy which characterizes the diversity of neurons on a hypersphere. By maintaining the minimum hyperspherical energy during training, OPT can greatly improve the empirical generalization. Specifically, OPT fixes the randomly initialized weights of the neurons and learns an orthogonal transformation that applies to these neurons. We consider multiple ways to learn such an orthogonal transformation, including unrolling orthogonalization algorithms, applying orthogonal parameterization, and designing orthogonality-preserving gradient descent. For better scalability, we propose the stochastic OPT which performs orthogonal transformation stochastically for partial dimensions of neurons. Interestingly, OPT reveals that learning a proper coordinate system for neurons is crucial to generalization. We provide some insights on why OPT yields better generalization. Extensive experiments validate the superiority of OPT over the standard training.}, - image={papers/orthogonal.png}, - year={2021} -} - -@inproceedings{murthy2020gradsim, - title={gradSim: Differentiable simulation for system identification and visuomotor control}, - author={Murthy, J Krishna and Macklin, Miles and Golemo, Florian and Voleti, Vikram and Petrini, Linda and Weiss, Martin and Considine, Breandan and Parent-L{\'e}vesque, J{\'e}r{\^o}me and Xie, Kevin and Erleben, Kenny and others}, - booktitle={International Conference on Learning Representations}, - abstract={We consider the problem of estimating an object's physical properties such as mass, friction, and elasticity directly from video sequences. Such a system identification problem is fundamentally ill-posed due to the loss of information during image formation. Current solutions require precise 3D labels which are labor-intensive to gather, and infeasible to create for many systems such as deformable solids or cloth. We present gradSim, a framework that overcomes the dependence on 3D supervision by leveraging differentiable multiphysics simulation and differentiable rendering to jointly model the evolution of scene dynamics and image formation. This novel combination enables backpropagation from pixels in a video sequence through to the underlying physical attributes that generated them.
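The core idea in the murthy2020gradsim entry, backpropagating from observations to physical parameters through a differentiable simulator, can be sketched on a toy 1-D falling-body problem. The dynamics, the trivial "renderer" (identity on positions), and the optimizer settings below are illustrative assumptions only, not the paper's multiphysics or rendering models.

```python
import torch

def simulate(g, steps=30, dt=0.1):
    # Toy differentiable "simulator": 1-D free fall under gravity g.
    y = torch.tensor(10.0)
    v = torch.tensor(0.0)
    ys = []
    for _ in range(steps):
        v = v - g * dt
        y = y + v * dt
        ys.append(y)
    return torch.stack(ys)

target = simulate(torch.tensor(9.8)).detach()  # stand-in for the observed video
g = torch.tensor(5.0, requires_grad=True)      # unknown physical parameter
opt = torch.optim.Adam([g], lr=0.1)
for _ in range(300):
    opt.zero_grad()
    loss = ((simulate(g) - target) ** 2).mean()  # pixel-space loss stand-in
    loss.backward()
    opt.step()
# g should now be close to 9.8
```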
Moreover, our unified computation graph -- spanning from the dynamics and through the rendering process -- enables learning in challenging visuomotor control tasks, without relying on state-based (3D) supervision, while obtaining performance competitive to or better than techniques that rely on precise 3D labels.}, - arxiv={2104.02646}, - image={papers/gradsim.png}, - projectpage={https://gradsim.github.io/}, - year={2020} -} - -@inproceedings{diaz2021uncertainty, - title={Uncertainty-Aware Policy Sampling and Mixing for Safe Interactive Imitation Learning}, - author={Diaz, Manfred and Fevens, Thomas and Paull, Liam}, - booktitle={2021 18th Conference on Robots and Vision (CRV)}, - pages={72--78}, - year={2021}, - abstract={Teaching robots how to execute tasks through demonstrations is appealing since it sidesteps the need to explicitly specify a reward function. However, posing imitation learning as a simple supervised learning problem suffers from the well-known problem of distributional shift - the teacher will only demonstrate the optimal trajectory and therefore the learner is unable to recover if it deviates even slightly from this trajectory since it has no training data for this case. This problem has been overcome in the literature by some element of interactivity in the learning process - usually by somehow interleaving the execution of the learner and the teacher so that the teacher can also demonstrate to the learner how to recover from mistakes. In this paper, we consider the cases where the robot has the potential to do harm, and therefore safety must be imposed at every step in the learning process. We show that uncertainty is an appropriate measure of safety and that both the mixing of the policies and the data sampling procedure benefit from considering the uncertainty of both the learner and the teacher. Our method, uncertainty-aware policy sampling and mixing (UPMS), is used to teach an agent to drive down a lane with fewer safety violations and fewer queries to the teacher than state-of-the-art methods.}, - pdf={Diaz_CRV_2021.pdf}, - image={papers/diaz_CRV_2021.png}, - organization={IEEE} -} - -@inproceedings{laferriere2021deep, - title={Deep Koopman Representation for Control over Images (DKRCI)}, - author={Laferri{\`e}re, Philippe and Laferri{\`e}re, Samuel and Dahdah, Steven and Forbes, James Richard and Paull, Liam}, - booktitle={2021 18th Conference on Robots and Vision (CRV)}, - pages={158--164}, - year={2021}, - abstract={The Koopman operator provides a means to represent nonlinear systems as infinite dimensional linear systems in a lifted state space. This enables the application of linear control techniques to nonlinear systems. However, the choice of a finite number of lifting functions, or Koopman observables, is still an unresolved problem. Deep learning techniques have recently been used to jointly learn these lifting functions along with the Koopman operator. However, these methods require knowledge of the system's state space. In this paper, we present a method to learn a Koopman representation directly from images and control inputs.
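For context on the lifted linear model in the laferriere2021deep entry, the snippet below fits the standard EDMD-style least-squares regression z_{t+1} ≈ A z_t + B u_t from data. In the paper the lifting is a learned image encoder; here the lifted states are random placeholders, so this only illustrates the regression step, not the learned representation.

```python
import numpy as np

rng = np.random.default_rng(0)
T, n, m = 200, 4, 1
Z  = rng.standard_normal((T, n))   # lifted states z_t (placeholder features)
U  = rng.standard_normal((T, m))   # control inputs u_t
Zp = rng.standard_normal((T, n))   # successor lifted states z_{t+1}

X = np.hstack([Z, U])                        # regressors [z_t, u_t]
W, *_ = np.linalg.lstsq(X, Zp, rcond=None)   # least squares: X @ W ≈ Zp
A, B = W[:n].T, W[n:].T                      # z_{t+1} ≈ A @ z_t + B @ u_t
```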
We then demonstrate our deep learning architecture on a cart-pole system with external inputs.}, - image={papers/koopman.png}, - pdf={koopman.pdf}, - organization={IEEE} -} - -@inproceedings{agia2022taskography, - title={Taskography: Evaluating robot task planning over large 3D scene graphs}, - author={Agia, Christopher and Jatavallabhula, Krishna Murthy and Khodeir, Mohamed and Miksik, Ondrej and Vineet, Vibhav and Mukadam, Mustafa and Paull, Liam and Shkurti, Florian}, - booktitle={Conference on Robot Learning}, - pages={46--58}, - year={2021}, - projectpage={https://taskography.github.io/}, - image={papers/taskography.png}, - openreview={nWLt35BU1z_}, - pdf={taskography.pdf}, - abstract={3D scene graphs (3DSGs) are an emerging description, unifying symbolic, topological, and metric scene representations. However, typical 3DSGs -contain hundreds of objects and symbols even for small environments, rendering task planning on the full graph impractical. We construct TASKOGRAPHY, -the first large-scale robotic task planning benchmark over 3DSGs. While most -benchmarking efforts in this area focus on vision-based planning, we systematically study symbolic planning, to decouple planning performance from visual representation learning. We observe that, among existing methods, neither classical -nor learning-based planners are capable of real-time planning over full 3DSGs. -Enabling real-time planning demands progress on both (a) sparsifying 3DSGs -for tractable planning and (b) designing planners that better exploit 3DSG hierarchies. Towards the former goal, we propose SCRUB, a task-conditioned 3DSG -sparsification method, enabling classical planners to match and in some cases surpass state-of-the-art learning-based planners. Towards the latter goal, we propose -SEEK, a procedure enabling learning-based planners to exploit 3DSG structure, -reducing the number of replanning queries required by current best approaches by -an order of magnitude. We will open-source all code and baselines to spur further -research along the intersections of robot task planning, learning and 3DSGs.}, - organization={PMLR} -} - -@inproceedings{courchesne2021assessing, - title={On Assessing the Usefulness of Proxy Domains for Developing and Evaluating Embodied Agents}, - author={Courchesne, Anthony and Censi, Andrea and Paull, Liam}, - booktitle={2021 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)}, - pages={4298--4305}, - year={2021}, - arxiv={2109.14516}, - image={papers/Courchesne_IROS_2021.png}, - abstract={In many situations it is either impossible or -impractical to develop and evaluate agents entirely on the target -domain on which they will be deployed. This is particularly -true in robotics, where doing experiments on hardware is -much more arduous than in simulation. This has become -arguably more so in the case of learning-based agents. To this -end, considerable recent effort has been devoted to developing -increasingly realistic and higher fidelity simulators. However, -we lack any principled way to evaluate how good a “proxy -domain” is, specifically in terms of how useful it is in helping -us achieve our end objective of building an agent that performs -well in the target domain. In this work, we investigate methods -to address this need. We begin by clearly separating two uses of -proxy domains that are often conflated: 1) their ability to be a -faithful predictor of agent performance and 2) their ability to be -a useful tool for learning.
In this paper, we attempt to clarify the -role of proxy domains and establish new proxy usefulness (PU) -metrics to compare the usefulness of different proxy domains. -We propose the relative predictive PU to assess the predictive -ability of a proxy domain and the learning PU to quantify -the usefulness of a proxy as a tool to generate learning data. -Furthermore, we argue that the value of a proxy is conditioned -on the task that it is being used to help solve. We demonstrate -how these new metrics can be used to optimize parameters of -the proxy domain for which obtaining ground truth via system -identification is not trivial.}, - organization={IEEE} -} - -@article{wiyatno2022lifelong, - title={Lifelong Topological Visual Navigation}, - author={Wiyatno, Rey Reza and Xu, Anqi and Paull, Liam}, - journal={IEEE Robotics and Automation Letters}, - volume={7}, - number={4}, - pages={9271--9278}, - arxiv={2110.08488}, - abstract={Commonly, learning-based topological navigation approaches produce a local policy while preserving some loose connectivity of the space through a topological map. Nevertheless, spurious or missing edges in the topological graph often lead to navigation failure. In this work, we propose a sampling-based graph building method, which results in sparser graphs yet with higher navigation performance compared to baseline methods. We also propose graph maintenance strategies that eliminate spurious edges and expand the graph as needed, which improves lifelong navigation performance. Unlike controllers that learn from fixed training environments, we show that our model can be fine-tuned using only a small number of collected trajectory images from a real-world environment where the agent is deployed. We demonstrate successful navigation after fine-tuning on real-world environments, and notably show significant navigation improvements over time by applying our lifelong graph maintenance strategies.}, - image={papers/ltvn.png}, - projectpage={https://montrealrobotics.ca/ltvn/}, - year={2022}, - publisher={IEEE} -} - -@article{liu2021iterative, - title={Iterative teaching by label synthesis}, - author={Liu, Weiyang and Liu, Zhen and Wang, Hanchen and Paull, Liam and Sch{\"o}lkopf, Bernhard and Weller, Adrian}, - journal={Advances in Neural Information Processing Systems (NeurIPS)}, - volume={34}, - pages={21681--21695}, - openreview={9rphbXqgmqM}, - abstract={In this paper, we consider the problem of iterative machine teaching, where a teacher provides examples sequentially based on the current iterative learner. In contrast to previous methods that have to scan over the entire pool and select teaching examples from it in each iteration, we propose a label synthesis teaching framework where the teacher randomly selects input teaching examples (e.g., images) and then synthesizes suitable outputs (e.g., labels) for them. We show that this framework can avoid costly example selection while still provably achieving exponential teachability. We propose multiple novel teaching algorithms in this framework. 
Finally, we empirically demonstrate the value of our framework.}, - image={papers/iterative.png}, - year={2021} -} - -@inproceedings{mai2022sample, - title={Sample efficient deep reinforcement learning via uncertainty estimation}, - author={Mai, Vincent and Mani, Kaustubh and Paull, Liam}, - booktitle={International Conference on Learning Representations (ICLR)}, - arxiv={2201.01666}, - abstract={In model-free deep reinforcement learning (RL) algorithms, using noisy value estimates to supervise policy evaluation and optimization is detrimental to the sample efficiency. As this noise is heteroscedastic, its effects can be mitigated using uncertainty-based weights in the optimization process. Previous methods rely on sampled ensembles, which do not capture all aspects of uncertainty. We provide a systematic analysis of the sources of uncertainty in the noisy supervision that occurs in RL, and introduce inverse-variance RL, a Bayesian framework which combines probabilistic ensembles and Batch Inverse Variance weighting. We propose a method whereby two complementary uncertainty estimation methods account for both the Q-value and the environment stochasticity to better mitigate the negative impacts of noisy supervision. Our results show significant improvement in terms of sample efficiency on discrete and continuous control tasks.}, - image={papers/mai_ICLR_2022.png}, - year={2022} -} - -@inproceedings{saavedra2022monocular, - title={Monocular Robot Navigation with Self-Supervised Pretrained Vision Transformers}, - author={Saavedra-Ruiz, Miguel and Morin, Sacha and Paull, Liam}, - booktitle={IEEE Conference on Robots and Vision}, - abstract={In this work, we consider the problem of learning a perception model for monocular robot navigation using few annotated images. Using a Vision Transformer (ViT) pretrained with a label-free self-supervised method, we successfully train a coarse image segmentation model for the Duckietown environment using 70 training images. Our model performs coarse image segmentation at the 8x8 patch level, and the inference resolution can be adjusted to balance prediction granularity and real-time perception constraints. We study how best to adapt a ViT to our task and environment, and find that some lightweight architectures can yield good single-image segmentation at a usable frame rate, even on CPU. The resulting perception model is used as the backbone for a simple yet robust visual servoing agent, which we deploy on a differential drive mobile robot to perform two tasks: lane following and obstacle avoidance.}, - arxiv={2203.03682}, - image={papers/transformers.png}, - year={2022} -} - -@inproceedings{diaz2022generalization, - title={Generalization Games for Reinforcement Learning}, - author={Diaz, Manfred and Gauthier, Charlie and Berseth, Glen and Paull, Liam}, - booktitle={ICLR 2022 Workshop on Gamification and Multiagent Solutions}, - openreview={HIc8rQv-LZq}, - abstract={In reinforcement learning (RL), the term generalization has either denoted introducing function approximation to reduce the intractability of problems with large state and action spaces or designated RL agents' ability to transfer learned experiences to one or more evaluation tasks. Recently, many subfields have emerged to understand how distributions of training tasks affect an RL agent's performance in unseen environments. While the field is extensive and ever-growing, recent research has underlined that variability among the different approaches is not as significant.
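A minimal sketch of the Batch Inverse-Variance weighting used in the biv and mai2022sample entries: each regression target is down-weighted by its estimated noise variance, normalized over the mini-batch. The constant eps and the exact normalization here are assumptions standing in for the papers' full formulation.

```python
import torch

def biv_loss(pred, target, noise_var, eps=0.5):
    # Inverse-variance weights, normalized over the mini-batch; eps (an
    # assumed constant) bounds the weight of near-noiseless labels and
    # controls the effective learning rate.
    w = 1.0 / (noise_var + eps)
    w = w / w.sum()
    return torch.sum(w * (pred - target) ** 2)

loss = biv_loss(torch.randn(32), torch.randn(32), torch.rand(32))
```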
We leverage this intuition to demonstrate how current methods for generalization in RL are specializations of a general framework. We obtain the fundamental aspects of this formulation by rebuilding a Markov Decision Process (MDP) from the ground up by resurfacing the game-theoretic framework of games against nature. The two-player game that arises from considering nature as a complete player in this formulation explains how existing methods rely on learned and randomized dynamics and initial state distributions. We develop this result further by drawing inspiration from mechanism design theory to introduce the role of a principal as a third player that can modify the payoff functions of the decision-making agent and nature. The games induced by playing against the principal extend our framework to explain how learned and randomized reward functions induce generalization in RL agents. The main contribution of our work is the complete description of the Generalization Games for Reinforcement Learning, a multiagent, multiplayer, game-theoretic formal approach to study generalization methods in RL. We offer a preliminary ablation experiment of the different components of the framework. We demonstrate that a more simplified composition of the objectives that we introduce for each player leads to comparable, and in some cases superior, zero-shot generalization compared to state-of-the-art methods, all while requiring almost two orders of magnitude fewer samples.}, - image={papers/generalization.png}, - year={2022} -} - -@inproceedings{bhatt2022f, - title={f-Cal: Aleatoric uncertainty quantification for robot perception via calibrated neural regression}, - author={Bhatt, Dhaivat and Mani, Kaustubh and Bansal, Dishank and Murthy, Krishna and Lee, Hanju and Paull, Liam}, - booktitle={2022 International Conference on Robotics and Automation (ICRA)}, - pages={6533--6539}, - year={2022}, - abstract={While modern deep neural networks are performant perception modules, performance (accuracy) alone is insufficient, particularly for safety-critical robotic applications such as self-driving vehicles. Robot autonomy stacks also require these otherwise blackbox models to produce reliable and calibrated measures of confidence on their predictions. Existing approaches estimate uncertainty from these neural network perception stacks by modifying network architectures, inference procedure, or loss functions. However, in general, these methods lack calibration, meaning that the predictive uncertainties do not faithfully represent the true underlying uncertainties (process noise). Our key insight is that calibration is only achieved by imposing constraints across multiple examples, such as those in a mini-batch; as opposed to existing approaches which only impose constraints per-sample, often leading to overconfident (thus miscalibrated) uncertainty estimates. By enforcing the distribution of outputs of a neural network to resemble a target distribution by minimizing an f -divergence, we obtain significantly better-calibrated models compared to prior approaches. 
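The distribution-matching idea in the bhatt2022f entry can be sketched as follows: across a mini-batch, the normalized residuals of a Gaussian regression head should look like N(0, 1) if the predicted uncertainties are calibrated. The closed-form KL on empirical moments below is an assumed simplification; the paper minimizes an f-divergence between empirical and target distributions more generally.

```python
import torch

def calibration_penalty(mu, sigma, y):
    # Normalized residuals should be ~ N(0, 1) across the mini-batch
    # if the predicted uncertainties sigma are calibrated.
    r = (y - mu) / sigma
    m, v = r.mean(), r.var()
    # Closed-form KL( N(m, v) || N(0, 1) ) as a simple batch-level
    # distribution-matching term.
    return 0.5 * (v + m ** 2 - 1.0 - torch.log(v))
```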
Our approach, f-Cal, outperforms existing uncertainty calibration approaches on robot perception tasks such as object detection and monocular depth estimation over multiple real-world benchmarks.}, - projectpage={https://f-cal.github.io/}, - pdf={f-cal.pdf}, - image={papers/f-cal.png}, - organization={IEEE} -} diff --git a/_config.yml b/_config.yml deleted file mode 100644 index e52d5c8..0000000 --- a/_config.yml +++ /dev/null @@ -1,115 +0,0 @@ -name: Robotics Group @ University of Montreal -description: The Robotics and Embodied AI Lab @ U de Montreal -url: https://montrealrobotics.github.io - -exclude: [bibble, README.md, Makefile, screenshot.png] - -# colorize code snippets with the rouge highlighter -highlighter: rouge - -# The path structure for blog posts. -permalink: /blog/:year/:month/:day/:title.html - -# Sections on the "people" page. -roles: - - key: faculty - name: Faculty - - key: staff - name: Staff - - key: developer - name: Software Developers - - key: postdoc - name: Postdocs - - key: masters - name: Master's Students - - key: phd - name: PhD Students - - key: intern - name: Undergraduate Researchers and Interns - - key: postdoc-alum - name: Past Postdocs - - key: phd-alum - name: Past PhD Students - - key: masters-alum - name: Past Master's Students - - key: intern-alum - name: Past Undergraduate Researchers and Interns - - -includes_dir: website-assets/_includes -layouts_dir: website-assets/_layouts -sass: - sass_dir: website-assets/_sass -asset_dir: /website-assets/assets - -# Number of news stories on the front page. -front_page_news: 8 - -# Number of old projects on the front page -front_page_old_projects: 4 - -# Base pathname for links. -base: '' - -# make pages for the _projects folder -collections: - projects: - output: true - events: - output: true - -# Navigation bar links.
-navigation: - - title: Home - link: / - - title: People - link: /people.html - - title: Research - link: /research.html - - title: Publications - link: /publications/ - - title: Events - link: /events.html - - title: Contact - link: /contact.html - - title: Blog - link: /blog.html - -# Includes -include: ['_pages'] - -gems: - - jekyll-paginate - - jekyll/scholar - - jemoji - - -# Build settings -markdown: kramdown - -# Jekyll-Scholar -scholar: - - style: apa - locale: en - - sort_by: year - order: descending - - source: /_bibliography/ - bibliography: papers.bib - bibliography_template: bib - - replace_strings: true - join_strings: true - - details_dir: bibliography - details_layout: bibtex.html - details_link: Details - - query: "@*" - -jquery_version: "1.12.4" -katex_version: "0.7.1" -anchorjs_version: "3.2.2" diff --git a/_data/people.yml b/_data/people.yml deleted file mode 100644 index 3da3968..0000000 --- a/_data/people.yml +++ /dev/null @@ -1,582 +0,0 @@ -#### Role keys: -# roles: -# - key: faculty -# name: Faculty -# - key: staff -# name: Staff -# - key: developer -# name: Software Developers -# - key: postdoc -# name: Postdocs -# - key: masters -# name: Master's Students -# - key: phd -# name: PhD Students -# - key: intern -# name: Undergraduate Researchers and Interns -# - key: postdoc-alum -# name: Past Postdocs -# - key: phd-alum -# name: Past PhD Students -# - key: masters-alum -# name: Past Master's Students -# - key: intern-alum -# name: Past Undergraduate Researchers and Interns - -liam: - display_name: "Liam Paull" - webpage: "http://liampaull.ca" - image: /img/people/liam.jpg - role: faculty - interests: "Robot perception, uncertainty, sim2real, and robot benchmarking" - -glen: - display_name: "Glen Berseth" - webpage: "http://fracturedplane.com" - image: /img/people/glen.jpg - role: faculty - interests: "Reinforcement learning, robotics, machine learning, generalization, planning" - - -florian: - display_name: "Florian Golemo" - webpage: "https://fgolemo.github.io/" - role: postdoc-alum - advisor: Liam Paull - coadvisor: Chris Pal - image: /img/people/florian.jpg - -ali: - display_name: "Ali Harakeh" - webpage: "https://www.aharakeh.com/" - interests: "Bayesian deep learning, conformal prediction, out-of-distribution generalization, and continual learning" - advisor: Liam Paull - role: postdoc-alum - image: /img/people/ali-h.jpg - current_role: Senior Applied Research Scientist at Mila - -kaustubh2: - display_name: "Kaustubh Mani" - interests: "Safety, Robustness and Model-Based RL" - advisor: Liam Paull - role: phd - - -steven: - display_name: "Steven Parkison" - webpage: "https://saparkison.github.io/" - interests: "SLAM, optimization, robotic perception, and lukewarm coffee" - advisor: Liam Paull - role: postdoc-alum - image: /img/people/stevenhead.png - current_role: Robotics researcher at the Institut de Recherche d'Hydro Québec (IREQ) - -kirsty: - display_name: "Kirsty Ellis" - role: developer - image: /img/people/kirsty.jpg - -roger: - display_name: "Roger Creus Castanyer" - role: masters - advisor: Glen Berseth - image: /img/people/rogercreus.jpeg - interests: "(Unsupervised) (Deep) reinforcement learning" - webpage: "https://roger-creus.github.io/" - -vincent: - display_name: "Vincent Mai" - role: phd-alum - advisor: Liam Paull - image: /img/people/vincent.jpg - current_role: AI researcher at the Institut de Recherche d'Hydro Québec (IREQ) - -krishna: - display_name: "Krishna Murthy Jatavallabhula" - role: phd-alum - advisor: Liam Paull -
image: /img/people/krishna.jpg - webpage: "https://krrish94.github.io" - current_role: Postdoc at MIT with Antonio Torralba and Joshua Tenenbaum - -manfred: - display_name: "Manfred Diaz" - role: phd - advisor: Liam Paull - image: /img/people/manfred.jpg - webpage: "https://manfreddiaz.github.io/" - -luke: - display_name: "Luke Rowe" - role: phd - advisor: Liam Paull - image: /img/people/luke.png - coadvisor: "Chris Pal" - interests: "Prediction, planning, and simulation for autonomous driving" - -ruixiang: - display_name: "Ruixiang Zhang" - role: phd - advisor: Liam Paull - image: /img/people/ruixiang.jpg - coadvisor: "Yoshua Bengio" - -zhen: - display_name: "Zhen Liu" - advisor: Liam Paull - coadvisor: "Yoshua Bengio" - role: phd - image: /img/people/zhen.jpeg - webpage: "http://itszhen.com/" - -mostafa: - display_name: "Mostafa Elaraby" - role: phd - advisor: Liam Paull - interests: "Continual learning, imitation learning" - image: /img/people/mostafa.jpeg - - -sacha: - display_name: "Sacha Morin" - role: phd - advisor: Liam Paull - webpage: https://sachamorin.github.io/ - image: /img/people/sacha.jpg - coadvisor: "Guy Wolf" - -ali-k: - display_name: "Ali Kuwajerwala" - role: masters - advisor: Liam Paull - image: /img/people/ali-k.jpeg - webpage: "https://alihkw.com/" - -dishank: - display_name: "Dishank Bansal" - role: masters-alum - advisor: Liam Paull - webpage: "http://dishank-b.github.io/" - thesis: "Calibrated Uncertainty Estimation for SLAM" - image: /img/people/dishank.jpg - current_role: Research Engineer at Meta - -charlie: - display_name: "Charlie Gauthier" - role: masters-alum - image: /img/people/charlie.jpg - advisor: Liam Paull - thesis: "Fear prediction for training robust RL agents" - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/28235" - current_role: PhD Student at Mila / REAL - -charlie2: - display_name: "Charlie Gauthier" - advisor: Liam Paull - role: phd - coadvisor: "Glen Berseth" - image: /img/people/charlie.jpg - - -miguel: - display_name: "Miguel Saavedra-Ruiz" - role: phd - advisor: Liam Paull - webpage: "https://mikes96.github.io/" - image: /img/people/miguel.jpg - - - -miguel2: - display_name: "Miguel Saavedra-Ruiz" - role: masters-alum - advisor: Liam Paull - webpage: "https://mikes96.github.io/" - image: /img/people/miguel.jpg - thesis: "Leveraging Self-Supervision for Visual Embodied Navigation with Neuralized Potential Fields" - current_role: PhD Student at Mila / REAL - - -simon: - display_name: "Simon Demeule" - role: masters - advisor: Glen Berseth - webpage: "https://simondemeule.com/" - image: /img/people/simon.jpg - -adriana: - display_name: "Adriana Hugessen" - advisor: Glen Berseth - role: masters - image: /img/people/adriana.jpg - -albert: - display_name: "Albert Zhan" - role: phd - advisor: Glen Berseth - image: /img/people/albert-z.jpg - webpage: "https://albertzhan.github.io" - -siddarth: - display_name: "Siddarth Venkatraman" - advisor: Glen Berseth - role: phd - image: /img/people/siddarth.jpg - interests: Reinforcement learning, representation learning - -raj: - display_name: "Raj Ghugare" - role: intern - image: /img/people/raj-g.png - webpage: "http://RajGhugare19.github.io/" - -aurelien: - display_name: "Aurélien Bück-Kaeffer" - role: intern-alum - -atharva: - display_name: "Atharva Chandak" - role: intern-alum - image: /img/people/atharva.jpeg - -bipasha: - display_name: "Bipasha Sen" - role: intern-alum - image: /img/people/bipasha.jpeg - current_role: PhD student at MIT - - -aditya: - display_name: "Aditya Agarwal" -
role: intern-alum - image: /img/people/aditya.png - current_role: PhD student at Brown - - -sai: - display_name: "Sai Krishna G.V." - role: masters-alum - thesis: "Deep active localization" - advisor: Liam Paull - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/22526" - current_role: "Reinforcement learning researcher at AI-Redefined" - image: /img/people/sai.jpg - webpage: "https://saikrishna-1996.github.io/" - - -gunshi: - display_name: "Gunshi Gupta" - role: masters-alum - advisor: Liam Paull - thesis: "Look-ahead meta-learning for continual learning" - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/24315" - current_role: PhD student at Oxford - image: /img/people/gunshi.jpg - -nithin: - display_name: "Nithin Vasisth" - role: masters-alum - thesis: "Lifelong learning of concepts in CRAFT" - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/24335" - advisor: Liam Paull - image: /img/people/nithin.jpg - current_role: Senior project scientist at Robert Bosch Center for Cyber-Physical Systems @ IISc - -breandan: - display_name: "Breandan Considine" - role: masters-alum - thesis: "Programming tools for intelligent systems" - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/24310" - advisor: Liam Paull - current_role: "PhD student at McGill" - coadvisor: "Michalis Famelis" - image: /img/people/breandan.jpg - -bhairav: - display_name: "Bhairav Mehta" - role: masters-alum - thesis: "On learning and generalization in unstructured task spaces" - advisor: Liam Paull - thesis_link: "https://papyrus.bib.umontreal.ca/xmlui/handle/1866/24310" - current_role: "CEO at Innabox" - image: /img/people/bhairav.jpg - -anthony: - display_name: "Anthony Courchesne" - role: masters-alum - thesis: "On quantifying the value of simulation for training and evaluating robotic agents" - advisor: Liam Paull - thesis_link: - image: /img/people/anthony.jpg - webpage: "https://helium.sparkpi.ca/" - current_role: Project manager at Institut du Vehicule Innovant (IVI) - - -dhaivat: - display_name: "Dhaivat Bhatt" - role: masters-alum - thesis: "Variational aleatoric uncertainty calibration in neural regression" - advisor: Liam Paull - thesis_link: - current_role: Research engineer at Samsung - image: /img/people/dhaivat.jpeg - -rey: - display_name: "Rey Reza Wiyatno" - role: masters-alum - thesis: "Lifelong Topological Visual Navigation" - advisor: Liam Paull - thesis_link: - current_role: - image: /img/people/rey.jpg - webpage: "http://rrwiyatn.github.io/" - - - - -kaustubh: - display_name: "Kaustubh Mani" - role: intern-alum - current_role: "PhD student at Mila / REAL" - -nikhil: - display_name: "Nikhil Varma Keetha" - role: intern-alum - current_role: PhD student at CMU - -harsha: - display_name: "Sai Sree Harsha" - role: intern-alum - current_role: Master's student at UCSD - -abhishek: - display_name: "Abhishek Jain" - role: intern-alum - -sharath: - display_name: "Sharath Chandra Raparthy" - role: intern-alum - current_role: "PhD student at Mila with Irina Rish" - -mark: - display_name: "Mark Van der Merwe" - role: intern-alum - current_role: "PhD student at the University of Michigan" - webpage: "https://mvandermerwe.github.io/" - -amrut: - display_name: "Amrut Sarangi" - role: intern-alum - - -rohan: - display_name: "Rohan Raj" - role: intern-alum - - -waleed: - display_name: "Waleed Khamies" - role: intern-alum - image: /img/people/waleed.jpg - webpage: "https://khamies.github.io/about" - -zihan: - display_name: "Zihan Wang" - role: intern-alum -
image: /img/people/zihan.jpg - current_role: "Master's student at Stanford" - -homanga: - display_name: "Homanga Bharadhwaj" - role: intern-alum - current_role: "PhD student at the University of Toronto" - -adam: - display_name: "Adam Sigal" - role: intern-alum - current_role: "PhD student at McGill" - -sarthak: - display_name: "Sarthak Sharma" - role: intern-alum - current_role: "AI/ML Engineer at Verisk AI Lab" - - -keehong: - display_name: "Keehong Seo" - role: collab - -yoshua: - display_name: "Yoshua Bengio" - role: collab - -teddy: - display_name: "Teddy Ort" - role: collab - -daniela: - display_name: "Daniela Rus" - role: collab - -chris: - display_name: "Chris Pal" - role: collab - -michalis: - display_name: "Michalis Famelis" - role: collab - -sarath: - display_name: "Sarath Chandar" - role: collab - -ganesh: - display_name: "Ganesh Iyer" - role: collab - webpage: "https://epiception.github.io" - -soroush: - display_name: "Soroush Saryazdi" - role: collab - webpage: "https://saryazdi.github.io/" - -madhav: - display_name: "K. Madhava Krishna" - role: collab - webpage: "https://robotics.iiit.ac.in" - -anqi: - display_name: "Anqi Xu" - role: collab - webpage: "https://anqixu.me/" - -hanju: - display_name: "Hanju Lee" - role: collab - webpage: "https://www.linkedin.com/in/lee-hanju-1848323/?originalSubdomain=jp" - -agia: - display_name: Christopher Agia - role: collab - webpage: https://agiachris.github.io/ - -khodeir: - display_name: Mohamed Khodeir - role: collab - webpage: https://www.linkedin.com/in/khodeir/?originalSubdomain=ca - -miksik: - display_name: Ondrej Miksik - role: collab - webpage: https://www.microsoft.com/en-us/research/people/onmiksik/ - -vineet: - display_name: Vibhav Vineet - role: collab - webpage: http://vibhavvineet.info/ - -mukadam: - display_name: Mustafa Mukadam - role: collab - webpage: https://www.mustafamukadam.com/ - -shkurti: - display_name: Florian Shkurti - role: collab - webpage: http://www.cs.toronto.edu/~florian/ - -qiao: - display_name: Qiao Gu - role: collab - webpage: https://georgegu1997.github.io/ - -mohd: - display_name: Mohd Omama - role: collab - webpage: https://scholar.google.com/citations?user=jFH3ShsAAAAJ&hl=en - -tao: - display_name: Tao Chen - role: collab - webpage: https://taochenshh.github.io/ - -alaa: - display_name: Alaa Maalouf - role: collab - webpage: https://www.csail.mit.edu/person/alaa-maalouf - -shuang: - display_name: Shuang Li - role: collab - webpage: https://people.csail.mit.edu/lishuang/ - -ayush: - display_name: Ayush Tewari - role: collab - webpage: https://ayushtewari.com/ - -tenenbaum: - display_name: Joshua B. 
Tenenbaum - role: collab - webpage: http://web.mit.edu/cocosci/josh.html - -celso: - display_name: Celso Miguel de Melo - role: collab - webpage: https://celsodemelo.net/ - -torralba: - display_name: Antonio Torralba - role: collab - webpage: https://groups.csail.mit.edu/vision/torralbalab/ - - -macklin: - display_name: Miles Macklin - role: collab - - - -voleti: - display_name: Vikram Voleti - role: collab - - -petrini: - display_name: Linda Petrini - role: collab - - -weiss: - display_name: Martin Weiss - role: collab - - -parent: - display_name: Jerome Parent-Levesque - role: collab - - -xie: - display_name: Kevin Xie - role: collab - - - -nowrouzerzahrai: - display_name: Derek Nowrouzerzahrai - role: collab - - -fidler: - display_name: Sanja Fidler - role: collab - - -erleben: - display_name: Kenny Erleben - role: collab - -yadav: - display_name: Karmesh Yadav - role: collab diff --git a/_events/aido-6.md b/_events/aido-6.md deleted file mode 100644 index 81d25df..0000000 --- a/_events/aido-6.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: The 6th AI Driving Olympics Competition - -notitle: false - -description: | - The 6th iteration of the AI Driving Olympics, taking place virtually at NeurIPS 2021. The AI-DO serves to benchmark the state of the art of artificial intelligence in autonomous driving by providing standardized simulation and hardware environments for tasks related to multi-sensory perception and embodied AI. - -people: - - liam - - charlie - - mostafa - -image: /img/events/aido-6.jpg -link: "https://driving-olympics.ai" -date: 2021-12-10 ---- - -# The AI Driving Olympics 6 - - - - -Duckietown traditionally hosts AI-DO competitions biannually, with finals events held at machine learning and robotics conferences such as the International Conference on Robotics and Automation (ICRA) and Neural Information Processing Systems (NeurIPS). - -AI-DO 6 will be held in conjunction with NeurIPS 2021 and will have three leagues: urban driving, advanced perception, and racing. The winter champions will be announced during NeurIPS 2021, on December 10, 2021! diff --git a/_events/ecorl.md b/_events/ecorl.md deleted file mode 100644 index 456f2c0..0000000 --- a/_events/ecorl.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: Workshop on the Ecological Theory of RL - -notitle: false - -description: | - Workshop at NeurIPS 2021 - -people: - - manfred - -image: /img/events/ecorl.jpeg -link: "https://sites.google.com/view/ecorl2021/" -date: 2021-12-14 ---- - diff --git a/_events/iros2020.md b/_events/iros2020.md deleted file mode 100644 index a05a634..0000000 --- a/_events/iros2020.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -title: IROS 2020 Workshop on Benchmarking Progress in Autonomous Driving - -notitle: false - -description: | - Autonomous driving has seen incredible progress of late. Recent workshops at top conferences in robotics, computer vision, and machine learning have primarily showcased the technological advancements in the field. This workshop provides a platform to investigate and discuss the methods by which progress in autonomous driving is evaluated, benchmarked, and verified.
- -people: - - liam - -image: /img/events/iros2020-bpad.jpg -link: "https://montrealrobotics.ca/driving-benchmarks/" -date: 2020-10-25 ---- - diff --git a/_events/iros2021.md b/_events/iros2021.md deleted file mode 100644 index 1404dcb..0000000 --- a/_events/iros2021.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -title: IROS 2021 Workshop on Evaluating the Broader Impacts of Self-Driving Cars - -notitle: false - -description: | - The primary objective of this workshop is to stimulate a conversation between roboticists, who focus on the development and implementation of autonomy algorithms, and regulators, economists, psychologists, and lawyers who are experts on the broader impacts that self-driving vehicles will have on society. - -people: - - liam - -image: /img/events/iros2021-bisc.jpg -link: "https://montrealrobotics.ca/broader-impacts-self-driving/" -date: 2021-09-27 ---- - -# IROS 2021 Workshop on Evaluating the Broader Impacts of Self-Driving Cars - -Self-driving cars have received significant attention in the last decade, and arguably have the potential to be the most impactful robotics application to date. The question that is usually asked by the public is “when are self-driving cars going to be here?” On one side, entrusting the entire driving problem to an autonomous agent seems frustratingly daunting. On the other side, we have started to see real deployments of autonomous vehicles in limited capacities, so perhaps there is reason for hope. - -Autonomous driving advancements are typically evaluated along well-defined, but potentially myopic performance criteria. These metrics are reasonable in the sense that they do give us some quantitative measure that we can use for comparison. However, the true potential impact of this technology reaches far beyond these relatively simplistic measures. In this workshop we will take a broader perspective with respect to evaluating the progress that we have made towards making self-driving a reality. In the process, we will focus particularly on aspects of the integration of this technology that are rarely covered in technical papers on the subject. Specifically, we will focus on the following three objectives: - -The primary objective of this workshop is to stimulate a conversation between roboticists, who focus on the development and implementation of autonomy algorithms, and regulators, economists, psychologists, and lawyers who are experts on the broader impacts that self-driving vehicles will have on society. We feel that it is critical to foster a community of researchers and practitioners whose expertise extends beyond the algorithmic challenges of realizing self-driving vehicles. As roboticists, we are ill-equipped to understand the broad impacts of this technology in areas that include ethics, philosophy, psychology, regulations, legal policy, and risk, to name a few, and it is critical that technological development is guided by such impacts. We will achieve our objective by inviting speakers and panelists who are experts in these adjacent fields to stimulate a broader conversation around this technology. This objective would be considered achieved if participants take the new perspectives they were exposed to and consider them in their own specific field of interest. For roboticists, this means explicitly considering these broader issues in the development of their algorithms. A stretch goal would be to spawn research collaborations between roboticists and researchers from these adjacent fields. 
diff --git a/_events/learningseriesfall2020.md b/_events/learningseriesfall2020.md deleted file mode 100644 index 1ac43f1..0000000 --- a/_events/learningseriesfall2020.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: Summer 2020 Robot Learning Seminar Series - -notitle: false - -description: | - The Robotics and Embodied AI Lab and Mila are hosting the Summer 2020 edition of the robot learning seminar series: a set of virtual talks by researchers in this field. Speakers in this inaugural session include Stefani Tellex, Rika Antonova, Gunshi Gupta, Igor Gilitschenski, and Bhairav Mehta. - -people: - - krishna - - florian - - dishank - - rey - - zhen - - liam - -image: /img/events/learningseries.jpg -link: "https://montrealrobotics.ca/robotlearningseries/fall2020/" -date: 2021-01-15 ---- - diff --git a/_events/learningseriessummer2020.md b/_events/learningseriessummer2020.md deleted file mode 100644 index cbc2485..0000000 --- a/_events/learningseriessummer2020.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: Fall 2020 Robot Learning Seminar Series - -notitle: false - -description: | - The Robotics and Embodied AI Lab and Mila are hosting the Fall 2020 edition of the robot learning seminar series: a set of virtual talks by researchers in this field. Speakers this session include Florian Shkurti, Valentin Peretroukhin, Ankur Handa, Shubham Tulsiani, Ronald Clark, Lerrel Pinto, Mustafa Mukadam, Shuran Song and Angela Schoellig. - -people: - - krishna - - florian - - dishank - - rey - - zhen - - liam - -image: /img/events/learningseries.jpg -link: "https://montrealrobotics.ca/robotlearningseries/summer2020" -date: 2020-09-04 ---- - diff --git a/_events/learningserieswinter2021.md b/_events/learningserieswinter2021.md deleted file mode 100644 index 0e0b754..0000000 --- a/_events/learningserieswinter2021.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: Winter 2021 Robot Learning Seminar Series - -notitle: false - -description: | - The Robotics and Embodied AI Lab and Mila are hosting the Winter 2021 edition of the robot learning seminar series: a set of virtual talks by researchers in this field. Speakers this session include Steven Waslander, Animesh Garg, Sylvia Herbert, Georgia Chalvatzaki, Deepak Pathak, Pulkit Agrawal, Lilian Weng, Kelsey Allen, Manolis Savva, and Jiajun Wu. - -people: - - krishna - - florian - - dishank - - rey - - zhen - - liam - -image: /img/events/learningseries.jpg -link: "https://montrealrobotics.ca/robotlearningseries/" -date: 2021-01-15 ---- - diff --git a/_events/mrss.md b/_events/mrss.md deleted file mode 100644 index b84b084..0000000 --- a/_events/mrss.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -title: Montreal Robotics Summer School - -notitle: false - -description: | - Robotics is a rapidly growing field with interest from around the world. This summer school offers tutorials and lectures on state-of-the-art machine learning methods for training the next generation of learning robots. It is supported by the many robotics groups around Montreal.
- -people: - - glen - - florian - - steven - -image: /img/events/mrss.png -link: "https://www.notion.so/fracturedplane/Montreal-Robotics-Summer-School-e9c969cc262b4f85aa17e5808a51e225" -date: 2022-08-22 ---- - - diff --git a/_events/physical-reasoning.md b/_events/physical-reasoning.md deleted file mode 100644 index 84939d7..0000000 --- a/_events/physical-reasoning.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -title: Workshop on Physical Reasoning and Inductive Biases for the Real World - -notitle: false - -description: | - Workshop at NeurIPS 2021 - -people: - - krishna - -image: /img/events/physical-reasoning.jpg -link: "https://physical-reasoning.github.io/" -date: 2021-12-14 ---- \ No newline at end of file diff --git a/_pages/publications.md b/_pages/publications.md deleted file mode 100644 index 43cac85..0000000 --- a/_pages/publications.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -layout: default -permalink: /publications/ -title: Publications -description: Publications (reverse chronological order) -years: [2023, 2022, 2021, 2020, 2019, 2018, 2017, 2016] ---- -
-
-Publications
-
-
-{% for y in page.years %}
-
-{{y}}
-
-{% bibliography -f papers -q @*[year={{y}}]* %}
-{% endfor %} diff --git a/_posts/2018-04-20-cvpr.md b/_posts/2018-04-20-cvpr.md deleted file mode 100644 index d0a2af6..0000000 --- a/_posts/2018-04-20-cvpr.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Paper on self-supervised visual odometry estimation accepted to CVPR workshop on SLAM. diff --git a/_posts/2018-09-05-welcomephd.md b/_posts/2018-09-05-welcomephd.md deleted file mode 100644 index 57ce5a3..0000000 --- a/_posts/2018-09-05-welcomephd.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Manfred Diaz and Ruixiang Zhang join the group as PhD students. Welcome! diff --git a/_posts/2018-11-28-image.md b/_posts/2018-11-28-image.md deleted file mode 100644 index a2588ac..0000000 --- a/_posts/2018-11-28-image.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Dhaivat Bhatt just joined our group as an intern. Welcome! diff --git a/_posts/2019-08-01-dal.md b/_posts/2019-08-01-dal.md deleted file mode 100644 index 51075d7..0000000 --- a/_posts/2019-08-01-dal.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Our paper _Deep Active Localization_ got accepted to Robotics and Automation Letters \ No newline at end of file diff --git a/_posts/2019-09-01-welcomeinterns.md b/_posts/2019-09-01-welcomeinterns.md deleted file mode 100644 index 2452d92..0000000 --- a/_posts/2019-09-01-welcomeinterns.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Sharath, Mark, Amrut, Rohan, and Dishank joined the group as interns. Welcome! diff --git a/_posts/2019-09-01-welcomestudents.md b/_posts/2019-09-01-welcomestudents.md deleted file mode 100644 index 8df6497..0000000 --- a/_posts/2019-09-01-welcomestudents.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Dhaivat, Rey, and Philippe joined the group as Master's students. Welcome! diff --git a/_posts/2019-09-10-adrpaper.md b/_posts/2019-09-10-adrpaper.md deleted file mode 100644 index ea22f8a..0000000 --- a/_posts/2019-09-10-adrpaper.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -The "Active Domain Randomization" paper got accepted to CoRL 2019. Congrats Bhairav, Manfred, and Florian. diff --git a/_posts/2020-01-20-gradslam-icra.md b/_posts/2020-01-20-gradslam-icra.md deleted file mode 100644 index 209a376..0000000 --- a/_posts/2020-01-20-gradslam-icra.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Check out our new ICRA 2020 paper [gradSLAM: Dense SLAM meets automatic differentiation](https://arxiv.org/abs/1910.10672) on fully differentiable dense SLAM: [Project page](http://montrealrobotics.ca/gradSLAM/), [Video](http://www.youtube.com/watch?feature=player_embedded&v=2ygtSJTmo08). diff --git a/_posts/2020-06-05-maplite-award.md b/_posts/2020-06-05-maplite-award.md deleted file mode 100644 index 3d56460..0000000 --- a/_posts/2020-06-05-maplite-award.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Our paper _MapLite: Autonomous intersection navigation without detailed prior maps_ was adjudged _best Robotics and Automation Letters (RAL) paper for 2019_! Check it out [here](https://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=8936918). And, here's a short [video abstract](https://youtu.be/P6Kk5pB2gF4).
diff --git a/_posts/2020-06-30-gunshi-graduates.md b/_posts/2020-06-30-gunshi-graduates.md deleted file mode 100644 index 4b9193e..0000000 --- a/_posts/2020-06-30-gunshi-graduates.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Gunshi Gupta successfully completes her M.Sc. and joins Wayve as a deep learning researcher! diff --git a/_posts/2020-09-10-robotlearningseries.md b/_posts/2020-09-10-robotlearningseries.md deleted file mode 100644 index f9330bb..0000000 --- a/_posts/2020-09-10-robotlearningseries.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -[Robot learning seminar series](http://montrealrobotics.ca/robotlearningseries/) launched! diff --git a/_posts/2020-10-10-neurips.md b/_posts/2020-10-10-neurips.md deleted file mode 100644 index 4d65b99..0000000 --- a/_posts/2020-10-10-neurips.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Two papers accepted to NeurIPS 2020 (one of them an oral - top 1.1%). Congratulations Gunshi and Ruixiang! diff --git a/_posts/2020-10-15-lamaml.md b/_posts/2020-10-15-lamaml.md deleted file mode 100644 index 5c369d4..0000000 --- a/_posts/2020-10-15-lamaml.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Check out our new NeurIPS 2020 Oral paper [La-MAML: Look-Ahead Meta-Learning for Continual Learning](https://arxiv.org/abs/2007.13904) [[Code](https://github.com/montrealrobotics/La-MAML)], [[Short Video](https://www.youtube.com/watch?v=HzewyVu8LaY)]. diff --git a/_posts/2020-10-30-irosworkshop.md b/_posts/2020-10-30-irosworkshop.md deleted file mode 100644 index 4709675..0000000 --- a/_posts/2020-10-30-irosworkshop.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -We organized an IROS workshop on [Benchmarking progress in autonomous driving](http://montrealrobotics.ca/driving-benchmarks/). diff --git a/_posts/2020-11-30-gradslam.md b/_posts/2020-11-30-gradslam.md deleted file mode 100644 index 5f93ee8..0000000 --- a/_posts/2020-11-30-gradslam.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -We released [gradslam](http://gradslam.github.io/) - a differentiable dense SLAM framework for deep learning. Check it out! diff --git a/_posts/2020-12-05-krishna-fellowship.md b/_posts/2020-12-05-krishna-fellowship.md deleted file mode 100644 index 523ca8e..0000000 --- a/_posts/2020-12-05-krishna-fellowship.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -layout: post -shortnews: true -icon: newspaper ---- - -Krishna [won an NVIDIA fellowship](https://blogs.nvidia.com/blog/2020/12/04/graduate-fellowships-gpu-computing-research/) for 2021-22. Congratulations! diff --git a/_projects/01-gradslam.md b/_projects/01-gradslam.md deleted file mode 100644 index 85e53d7..0000000 --- a/_projects/01-gradslam.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: gradslam - -notitle: false - -description: | - gradslam is an open-source framework providing differentiable building blocks for simultaneous localization and mapping (SLAM) systems. We enable the usage of dense SLAM subsystems from the comfort of PyTorch.
- -people: - - krishna - - liam - -collaborators: - - ganesh - - soroush - - -layout: project -image: "https://gradslam.github.io/images/pointfusiondemo.gif" -link: https://gradslam.github.io/ -last-updated: 2020-12-06 ---- - -## gradslam - -gradslam is an open-source framework providing differentiable building blocks for simultaneous localization and mapping (SLAM) systems. We enable the usage of dense SLAM subsystems from the comfort of PyTorch. diff --git a/_projects/adr.md b/_projects/adr.md deleted file mode 100644 index ca0d78e..0000000 --- a/_projects/adr.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -title: Active Domain Randomization - -description: Making sim-to-real transfer more efficient - -people: - - bhairav - - florian - - manfred - - liam - - -collaborators: - - chris - -layout: project -image: /img/papers/adr.gif - -last-updated: 2019-06-28 ---- - -## Active Domain Randomization - -Domain randomization is a popular technique for improving domain transfer, often used in a zero-shot setting when the target domain is unknown or cannot easily be used for training. In this work, we empirically examine the effects of domain randomization on agent generalization. Our experiments show that domain randomization may lead to suboptimal, high-variance policies, which we attribute to the uniform sampling of environment parameters. We propose Active Domain Randomization, a novel algorithm that learns a parameter sampling strategy. Our method looks for the most informative environment variations within the given randomization ranges by leveraging the discrepancies of policy rollouts in randomized and reference environment instances. We find that training more frequently on these instances leads to better overall agent generalization. Our experiments across various physics-based simulated and real-robot tasks show that this enhancement leads to more robust, consistent policies. diff --git a/_projects/conceptfusion.md b/_projects/conceptfusion.md deleted file mode 100644 index ff5062c..0000000 --- a/_projects/conceptfusion.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: "ConceptFusion: Open-set Multimodal 3D Mapping" - -# status: active - -notitle: false - -description: | - ConceptFusion builds open-set 3D maps that can be queried via text, click, image, or audio. Given a series of RGB-D images, our system builds a 3D scene representation that is inherently multimodal by leveraging foundation models such as CLIP, and therefore doesn't require any additional training or finetuning. - -people: - - ali-k - - liam - -collaborators: - - krishna - - qiao - - mohd - - tao - - alaa - - shuang - - ganesh - - soroush - - nikhil - - ayush - - tenenbaum - - celso - - madhav - - shkurti - - torralba - -layout: project -image: /img/papers/conceptfusion.gif -link: https://concept-fusion.github.io/ -last-updated: 2023-06-16 ---- - -## ConceptFusion: Open-set Multimodal 3D Mapping - -Building 3D maps of the environment is central to robot navigation, planning, and interaction with objects in a scene. Most existing approaches that integrate semantic concepts with 3D maps largely remain confined to the closed-set setting: they can only reason about a finite set of concepts, pre-defined at training time. Further, these maps can only be queried using class labels, or in recent work, using text prompts.
- -We address both these issues with ConceptFusion, a scene representation that is: (i) fundamentally open-set, enabling reasoning beyond a closed set of concepts, and (ii) inherently multi-modal, enabling a diverse range of possible queries to the 3D map, from language, to images, to audio, to 3D geometry, all working in concert. ConceptFusion leverages the open-set capabilities of today’s foundation models pre-trained on internet-scale data to reason about concepts across modalities such as natural language, images, and audio. We demonstrate that pixel-aligned open-set features can be fused into 3D maps via traditional SLAM and multi-view fusion approaches. This enables effective zero-shot spatial reasoning, not needing any additional training or finetuning, and retains long-tailed concepts better than supervised approaches, outperforming them by a margin of more than 40% in 3D IoU. We extensively evaluate ConceptFusion on a number of real-world datasets, simulated home environments, a real-world tabletop manipulation task, and an autonomous driving platform. We showcase new avenues for blending foundation models with 3D open-set multimodal mapping. diff --git a/_projects/ctcnet.md b/_projects/ctcnet.md deleted file mode 100644 index d361201..0000000 --- a/_projects/ctcnet.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -title: Self-supervised visual odometry estimation - -description: | - A self-supervised deep network for visual odometry estimation from monocular imagery. - -people: - - krishna - - gunshi - - liam - -collaborators: - - ganesh - - madhav - -layout: project -link: "https://krrish94.github.io/CTCNet-release/" -image: /img/events/ctcnet.png -last-updated: 2019-01-01 ---- diff --git a/_projects/dal.md b/_projects/dal.md deleted file mode 100644 index 3a407d7..0000000 --- a/_projects/dal.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: Deep Active Localization - -description: | - Learned active localization, implemented on "real" robots. - -people: - - sai - - dhaivat - - krishna - - vincent - - liam - -collaborators: - - keehong - -layout: project -last-updated: 2018-11-27 - -image: /img/papers/dal.jpg ---- - - -# Deep Active Localization - -Active localization is the problem of generating robot actions that allow a robot to maximally disambiguate its pose within a reference map. Traditional approaches to this use an information-theoretic criterion for action selection and hand-crafted perceptual models. In this work we propose an end-to-end differentiable method for learning to take informative actions that is trainable entirely in simulation and then transferable to real robot hardware with zero refinement. The system is composed of two modules: a convolutional neural network for perception, and a deep reinforcement learned planning module. We introduce a multi-scale approach to the learned perceptual model since the accuracy needed to perform action selection with reinforcement learning is much less than the accuracy needed for robot control. We demonstrate that the resulting system outperforms systems that use the traditional approach for either perception or planning. We also demonstrate our approach's robustness to different map configurations and other nuisance parameters through the use of domain randomization in training. The code is also compatible with the OpenAI gym framework, as well as the Gazebo simulator.
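To make the two-module design just described concrete, here is a minimal PyTorch sketch of a perception network that produces a belief over pose cells and a planning head that maps that belief to action logits. The layer sizes, the grid-shaped belief parameterization, and the `num_actions` parameter are illustrative assumptions, not the released DAL architecture.

```python
import torch
import torch.nn as nn

class PerceptionNet(nn.Module):
    """Stand-in for the perception CNN: maps an observation rendered as an
    image to a belief (probability map) over pose cells in the reference map."""
    def __init__(self):
        super().__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(1, 16, kernel_size=3, padding=1), nn.ReLU(),
            nn.Conv2d(16, 1, kernel_size=3, padding=1),
        )

    def forward(self, obs):                  # obs: (B, 1, H, W)
        logits = self.conv(obs).flatten(1)   # (B, H*W)
        return torch.softmax(logits, dim=1)  # belief over pose cells

class PlannerNet(nn.Module):
    """Stand-in for the RL planning module: maps a (possibly downsampled,
    multi-scale) belief to logits over discrete actions."""
    def __init__(self, num_cells=32 * 32, num_actions=4):
        super().__init__()
        self.mlp = nn.Sequential(
            nn.Linear(num_cells, 128), nn.ReLU(),
            nn.Linear(128, num_actions),
        )

    def forward(self, belief):               # belief: (B, num_cells)
        return self.mlp(belief)              # action logits, trained with RL
```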
diff --git a/_projects/fcal.md b/_projects/fcal.md deleted file mode 100644 index b8f4293..0000000 --- a/_projects/fcal.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: f-Cal - Calibrated aleatoric uncertainty estimation from neural networks for robot perception - -notitle: false - -description: | - f-Cal is a calibration method for probabilistic regression networks. Typical Bayesian neural networks are shown to be overconfident in their predictions. To use the predictions for downstream tasks, reliable and calibrated uncertainty estimates are critical. f-Cal is a straightforward loss function that can be employed to train any probabilistic neural regressor and obtain calibrated uncertainty estimates. - -people: - - dhaivat - - kaustubh - - dishank - - krishna - - liam - -collaborators: - - hanju - - -layout: project -image: /img/papers/f_cal_od_gif1.gif -last-updated: 2022-04-20 -link: https://f-cal.github.io/ ---- - -## f-cal - diff --git a/_projects/gradsim.md b/_projects/gradsim.md deleted file mode 100644 index 4e64160..0000000 --- a/_projects/gradsim.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: gradsim - -notitle: false - -description: | - gradSim is a framework that overcomes the dependence on 3D supervision by leveraging differentiable multiphysics simulation and differentiable rendering to jointly model the evolution of scene dynamics and image formation. - -people: - - krishna - - florian - - breandan - - liam - - -collaborators: - - macklin - - voleti - - petrini - - weiss - - parent - - xie - - erleben - - shkurti - - nowrouzerzahrai - - fidler - - -layout: project -image: /img/papers/walker.gif -link: https://gradslam.github.io/ -last-updated: 2021-03-29 ---- - -## gradsim - diff --git a/_projects/ivrl.md b/_projects/ivrl.md deleted file mode 100644 index 2eaae6a..0000000 --- a/_projects/ivrl.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: Inverse Variance Reinforcement Learning -status: active - -notitle: false - -description: | - Improving sample efficiency in deep reinforcement learning by mitigating the impacts of heteroscedastic noise in the bootstrapped target using uncertainty estimation. - -people: - - vincent - - kaustubh - - waleed - - liam - -layout: project -image: /img/papers/mai2022ivrl_resized.jpg -link: https://montrealrobotics.ca/ivrl/ -last-updated: 2022-01-12 ---- - -## Inverse Variance Reinforcement Learning - -Most robotics problems can be written as (Partially Observable) Markov Decision Processes (MDPs), with discrete or continuous observation and action spaces. Deep Reinforcement Learning (DRL) is a powerful tool to find an optimal policy for these processes, based on experience acquired during the training process. The training of a DRL agent requires many trajectories, which can be arduous and expensive to produce in the real world. Indeed, the real world is not parallelizable, may require human efforts to reset, and comes with risks for the robot and the environment. Gathering sufficient experience is therefore one of the most important challenges when applying DRL to robotics. *The objective of this project is to reduce the number of samples necessary to train a DRL agent on a robot.* - -[Figure: a diagram representing the generation process of the noisy target.] - -DRL algorithms are complex processes. An important part of most model-free algorithms is learning the value function of a given state or state-action pair, i.e., the expected return given the current policy.
To do so, deep supervised learning components are used, where the input is the state(-action), and the label is called the target. The target T is a noisy sample of the value. Often, it is computed using the reward r and the next state s' sampled from experience, the next action a' based on s' and the current policy, and the value Q of the next state-action pair, which is bootstrapped from the current value estimator (this is the Temporal Difference target). The noise on the target negatively impacts the learning process: the networks learn from wrong data, which entails slower learning and instability. - -The key element in this project is the fact that the noise affecting the target, i.e., its difference from the true and unique value function, is heteroscedastic. This means that the distribution it is sampled from changes for each input and training step. Sometimes, this distribution has a very low variance: the target is close to the value. Sometimes, on the other hand, the target is subject to a lot of noise and does not contain useful information with respect to the value. Therefore, the value estimation task in DRL is a case of heteroscedastic regression. - -## Projects - -### Batch Inverse-Variance Weighting for Deep Heteroscedastic Regression - -Noisy labels slow the learning process in regression: the first part of this project was to prove that the effect of noisy labels can be mitigated under the hypothesis that we know the variance of the noise distribution of each label. How can we include this additional information for heteroscedastic regression? Intuitively, we should give more weight to the labels we trust more. In linear regression, the Gauss-Markov theorem shows that the optimal solution is to weigh each sample by the inverse of the variance of the label noise. We show that adapting inverse-variance weighting for gradient-based optimization methods allows us to significantly improve the performance of the learning process. Our paper, [Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression](https://arxiv.org/abs/2107.04497) (BIV), was presented at the [Uncertainty and Robustness in Deep Learning](https://sites.google.com/view/udlworkshop2021/home?authuser=0) workshop at ICML 2021. -
-[Figure: learning curves in which BIV outperforms the L2 loss and other baselines. Caption: BIV improves the learning performance with noisy labels compared to the L2 loss. Source: Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression.]
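To make the weighting scheme concrete, here is a minimal PyTorch sketch of a BIV-style loss, assuming a per-label noise variance is available; the `eps` stabilizer and the batch-level normalization of the weights are assumptions rather than the paper's exact formulation.

```python
import torch

def inverse_variance_loss(pred, target, target_var, eps=1e-3):
    """Weighted MSE where each label is weighted by the inverse of its noise
    variance, following the Gauss-Markov intuition above. Weights are
    normalized over the mini-batch to keep the loss scale stable."""
    weights = 1.0 / (target_var + eps)  # trust low-noise labels more
    weights = weights / weights.sum()   # normalize over the mini-batch
    return (weights * (pred - target) ** 2).sum()
```

With a linear model and `eps = 0` this recovers the classical Gauss-Markov weighting; the stabilizer simply keeps near-zero-variance labels from dominating a batch.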
- -### Inverse-Variance Reinforcement Learning - -See project page: https://montrealrobotics.ca/ivrl/ - -The second part of the project was to use this weighting scheme in a DRL setting. For this work, the challenge was to estimate the uncertainty of the target. A systematic analysis of the sources of uncertainty in the target generation process justifies the use of deep variance ensembles. These are used to estimate the variance due to the stochasticity of the environment and the policy, as well as the predictive uncertainty of the value prediction used to bootstrap the target. As the variance output by these deep ensembles is also the result of a training process, the uncertainty estimation is subject to complex dynamics. We show that the BIV weighting scheme is robust to changes of scale in the variance estimation, and that combining BIV with deep variance ensembles in DRL algorithms such as DQN and SAC leads to significant improvements in sample efficiency. This framework, called Inverse-Variance Reinforcement Learning (IV-RL), is presented in our [Sample Efficient Deep Reinforcement Learning via Uncertainty Estimation](https://openreview.net/forum?id=vrW3tvDfOJQ) submission to ICLR 2022. -
-[Figure: learning curves in which IV-SAC outperforms DQN and other ensemble baselines. Caption: IV-RL on SAC improves the learning performance and the sample efficiency compared to other ensemble-based baselines. Source: Sample Efficient Deep Reinforcement Learning via Uncertainty Estimation.]
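To illustrate where that target variance could come from, here is a hedged sketch of a TD target computed from a K-member variance ensemble. The epistemic/aleatoric split below is a common deep-ensemble decomposition assumed for illustration; the interfaces and shapes are not the IV-RL implementation.

```python
import torch

def td_target_and_variance(reward, done, gamma, next_q, next_q_var):
    """Estimate the TD target and its variance from a variance ensemble.

    reward, done: (B,) tensors; done is 0/1.
    next_q:       (K, B) Q-value means from K ensemble members.
    next_q_var:   (K, B) variances predicted by the same members.
    """
    q_mean = next_q.mean(dim=0)
    epistemic = next_q.var(dim=0)        # disagreement between members
    aleatoric = next_q_var.mean(dim=0)   # mean predicted noise variance
    target = reward + gamma * (1.0 - done) * q_mean
    # Terminal transitions bootstrap nothing, so their target variance is 0 here.
    target_var = (gamma ** 2) * (1.0 - done) * (epistemic + aleatoric)
    return target, target_var  # target_var feeds the inverse-variance weights above
```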
- diff --git a/_projects/lamaml.md b/_projects/lamaml.md deleted file mode 100644 index ba7a007..0000000 --- a/_projects/lamaml.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: La-MAML - -notitle: false - -description: | - Look-ahead meta-learning for continual learning - -people: - - gunshi - - liam - - -collaborators: - - yadav - - -layout: project -image: "https://mila.quebec/wp-content/uploads/2020/11/lamaml_jpg.gif" -link: https://mila.quebec/en/article/la-maml-look-ahead-meta-learning-for-continual-learning/ -last-updated: 2020-11-19 ---- - -## La-MAML - diff --git a/_projects/ltvn.md b/_projects/ltvn.md deleted file mode 100644 index ea5ee16..0000000 --- a/_projects/ltvn.md +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: Lifelong Topological Visual Navigation -# status: active - -notitle: false - -description: | - A learning-based topological visual navigation method with graph update strategies that improves lifelong navigation performance over time. - -people: - - rey - - liam - -collaborators: - - anqi - -layout: project -image: /img/papers/ltvn.gif -link: https://montrealrobotics.ca/ltvn/ -last-updated: 2021-11-26 ---- - -## Lifelong Topological Visual Navigation - -See project page: https://montrealrobotics.ca/ltvn/ diff --git a/_projects/o4a.md b/_projects/o4a.md deleted file mode 100644 index 294577c..0000000 --- a/_projects/o4a.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -title: One-4-All - Neural Potential Fields for Embodied Navigation -# status: active - -notitle: false - -description: | - An end-to-end fully parametric method for image-goal navigation that leverages self-supervised and manifold learning to replace a topological graph with a geodesic regressor. During navigation, the geodesic regressor is used as an attractor in a potential function defined in latent space, allowing navigation to be framed as a minimization problem. - -people: - - sacha - - miguel - - liam - -layout: project -image: /img/papers/o4a.gif -link: https://montrealrobotics.ca/o4a/ -last-updated: 2023-03-16 ---- - -## One-4-All: Neural Potential Fields for Embodied Navigation - -A fundamental task in robotics is to navigate between two locations. In particular, real-world navigation can require long-horizon planning using high-dimensional RGB images, which poses a substantial challenge for end-to-end learning-based approaches. Current semi-parametric methods instead achieve long-horizon navigation by combining learned modules with a topological memory of the environment, often represented as a graph over previously collected images. However, using these graphs in practice typically involves tuning a number of pruning heuristics to avoid spurious edges, limit runtime memory usage and allow reasonably fast graph queries. In this work, we present One-4-All (O4A), a method leveraging self-supervised and manifold learning to obtain a graph-free, end-to-end navigation pipeline in which the goal is specified as an image. Navigation is achieved by greedily minimizing a potential function defined continuously over the O4A latent space. Our system is trained offline on non-expert exploration sequences of RGB data and controls, and does not require any depth or pose measurements. We show that O4A can reach long-range goals in 8 simulated Gibson indoor environments, and further demonstrate successful real-world navigation using a Jackal UGV platform.
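As a sketch of what "navigation as minimization" can look like in code, the controller below scores a set of candidate actions by the potential (estimated geodesic distance to the goal) of their predicted next latent state and greedily picks the lowest. The `encoder`, `dynamics`, and `geodesic_regressor` interfaces are illustrative assumptions about the learned modules, not the O4A release.

```python
import torch

@torch.no_grad()
def greedy_step(encoder, dynamics, geodesic_regressor, obs, goal, actions):
    """One step of greedy descent on a latent potential field.

    encoder:            image -> latent embedding
    dynamics:           (latent, action) -> predicted next latent
    geodesic_regressor: (latent, goal latent) -> scalar geodesic estimate
    """
    z, z_goal = encoder(obs), encoder(goal)
    best_action, best_potential = None, float("inf")
    for action in actions:
        z_next = dynamics(z, action)                 # imagined next latent state
        potential = geodesic_regressor(z_next, z_goal).item()
        if potential < best_potential:
            best_action, best_potential = action, potential
    return best_action
```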
diff --git a/_projects/taskography.md b/_projects/taskography.md deleted file mode 100644 index 41da054..0000000 --- a/_projects/taskography.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -title: Taskography - Evaluating robot task planning over large 3D scene graphs - -notitle: false - -description: | - Taskography is the first large-scale robotic task planning benchmark over 3DSGs. While most benchmarking efforts in this area focus on vision-based planning, we systematically study symbolic planning, to decouple planning performance from visual representation learning. - -people: - - krishna - - liam - -collaborators: - - agia - - khodeir - - miksik - - vineet - - mukadam - - shkurti - - -layout: project -image: /img/papers/3dsg.png -last-updated: 2021-07-20 -link: https://taskography.github.io ---- - -## Taskography - diff --git a/assets/css/style.css b/assets/css/style.css new file mode 100644 index 0000000..bc2033b --- /dev/null +++ b/assets/css/style.css @@ -0,0 +1,2883 @@ +/*! normalize.css v4.1.1 | MIT License | github.com/necolas/normalize.css */ +/** 1. Change the default font family in all browsers (opinionated). 2. Prevent adjustments of font size after orientation changes in IE and iOS. */ +html { font-family: sans-serif; /* 1 */ -ms-text-size-adjust: 100%; /* 2 */ -webkit-text-size-adjust: 100%; /* 2 */ } + +/** Remove the margin in all browsers (opinionated). */ +body { margin: 0; } + +/* HTML5 display definitions ========================================================================== */ +/** Add the correct display in IE 9-. 1. Add the correct display in Edge, IE, and Firefox. 2. Add the correct display in IE. */ +article, aside, details, figcaption, figure, footer, header, main, menu, nav, section { /* 1 */ display: block; } + +summary { display: list-item; } + +/** Add the correct display in IE 9-. */ +audio, canvas, progress, video { display: inline-block; } + +/** Add the correct display in iOS 4-7. */ +audio:not([controls]) { display: none; height: 0; } + +/** Add the correct vertical alignment in Chrome, Firefox, and Opera. */ +progress { vertical-align: baseline; } + +/** Add the correct display in IE 10-. 1. Add the correct display in IE. */ +template, [hidden] { display: none !important; } + +/* Links ========================================================================== */ +/** Remove the gray background on active links in IE 10. */ +a { background-color: transparent; /* 1 */ } + +/** Remove the outline on focused links when they are also active or hovered in all browsers (opinionated). */ +a:active, a:hover { outline-width: 0; } + +/* Text-level semantics ========================================================================== */ +/** 1. Remove the bottom border in Firefox 39-. 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari. */ +abbr[title] { border-bottom: none; /* 1 */ text-decoration: underline; /* 2 */ text-decoration: underline dotted; /* 2 */ } + +/** Prevent the duplicate application of `bolder` by the next rule in Safari 6. */ +b, strong { font-weight: inherit; } + +/** Add the correct font weight in Chrome, Edge, and Safari. */ +b, strong { font-weight: bolder; } + +/** Add the correct font style in Android 4.3-. */ +dfn { font-style: italic; } + +/** Correct the font size and margin on `h1` elements within `section` and `article` contexts in Chrome, Firefox, and Safari. */ +h1 { font-size: 2em; margin: 0.67em 0; } + +/** Add the correct background and color in IE 9-. 
*/ +mark { background-color: #ff0; color: #000; } + +/** Add the correct font size in all browsers. */ +small { font-size: 80%; } + +/** Prevent `sub` and `sup` elements from affecting the line height in all browsers. */ +sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } + +sub { bottom: -0.25em; } + +sup { top: -0.5em; } + +/* Embedded content ========================================================================== */ +/** Remove the border on images inside links in IE 10-. */ +img { border-style: none; } + +/** Hide the overflow in IE. */ +svg:not(:root) { overflow: hidden; } + +/* Grouping content ========================================================================== */ +/** 1. Correct the inheritance and scaling of font size in all browsers. 2. Correct the odd `em` font sizing in all browsers. */ +code, kbd, pre, samp { font-family: monospace, monospace; /* 1 */ font-size: 1em; /* 2 */ } + +/** Add the correct margin in IE 8. */ +figure { margin: 1em 40px; } + +/** 1. Add the correct box sizing in Firefox. 2. Show the overflow in Edge and IE. */ +hr { box-sizing: content-box; /* 1 */ height: 0; /* 1 */ overflow: visible; /* 2 */ } + +/* Forms ========================================================================== */ +/** 1. Change font properties to `inherit` in all browsers (opinionated). 2. Remove the margin in Firefox and Safari. */ +button, input, select, textarea { font: inherit; /* 1 */ margin: 0; /* 2 */ } + +/** Restore the font weight unset by the previous rule. */ +optgroup { font-weight: bold; } + +/** Show the overflow in IE. 1. Show the overflow in Edge. */ +button, input { /* 1 */ overflow: visible; } + +/** Remove the inheritance of text transform in Edge, Firefox, and IE. 1. Remove the inheritance of text transform in Firefox. */ +button, select { /* 1 */ text-transform: none; } + +/** 1. Prevent a WebKit bug where (2) destroys native `audio` and `video` controls in Android 4. 2. Correct the inability to style clickable types in iOS and Safari. */ +button, html [type="button"], [type="reset"], [type="submit"] { -webkit-appearance: button; /* 2 */ } + +/** Remove the inner border and padding in Firefox. */ +button::-moz-focus-inner, [type="button"]::-moz-focus-inner, [type="reset"]::-moz-focus-inner, [type="submit"]::-moz-focus-inner { border-style: none; padding: 0; } + +/** Restore the focus styles unset by the previous rule. */ +button:-moz-focusring, [type="button"]:-moz-focusring, [type="reset"]:-moz-focusring, [type="submit"]:-moz-focusring { outline: 1px dotted ButtonText; } + +/** Change the border, margin, and padding in all browsers (opinionated). */ +fieldset { border: 1px solid #c0c0c0; margin: 0 2px; padding: 0.35em 0.625em 0.75em; } + +/** 1. Correct the text wrapping in Edge and IE. 2. Correct the color inheritance from `fieldset` elements in IE. 3. Remove the padding so developers are not caught out when they zero out `fieldset` elements in all browsers. */ +legend { box-sizing: border-box; /* 1 */ color: inherit; /* 2 */ display: table; /* 1 */ max-width: 100%; /* 1 */ padding: 0; /* 3 */ white-space: normal; /* 1 */ } + +/** Remove the default vertical scrollbar in IE. */ +textarea { overflow: auto; } + +/** 1. Add the correct box sizing in IE 10-. 2. Remove the padding in IE 10-. */ +[type="checkbox"], [type="radio"] { box-sizing: border-box; /* 1 */ padding: 0; /* 2 */ } + +/** Correct the cursor style of increment and decrement buttons in Chrome. 
*/ +[type="number"]::-webkit-inner-spin-button, [type="number"]::-webkit-outer-spin-button { height: auto; } + +/** 1. Correct the odd appearance in Chrome and Safari. 2. Correct the outline style in Safari. */ +[type="search"] { -webkit-appearance: textfield; /* 1 */ outline-offset: -2px; /* 2 */ } + +/** Remove the inner padding and cancel buttons in Chrome and Safari on OS X. */ +[type="search"]::-webkit-search-cancel-button, [type="search"]::-webkit-search-decoration { -webkit-appearance: none; } + +/** Correct the text style of placeholders in Chrome, Edge, and Safari. */ +::-webkit-input-placeholder { color: inherit; opacity: 0.54; } + +/** 1. Correct the inability to style clickable types in iOS and Safari. 2. Change font properties to `inherit` in Safari. */ +::-webkit-file-upload-button { -webkit-appearance: button; /* 1 */ font: inherit; /* 2 */ } + +* { box-sizing: border-box; } + +input, select, textarea, button { font-family: inherit; font-size: inherit; line-height: inherit; } + +body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; font-size: 14px; line-height: 1.5; color: #24292e; background-color: #fff; } + +a { color: #0366d6; text-decoration: none; } +a:hover { text-decoration: underline; } + +b, strong { font-weight: 600; } + +hr, .rule { height: 0; margin: 15px 0; overflow: hidden; background: transparent; border: 0; border-bottom: 1px solid #dfe2e5; } +hr::before, .rule::before { display: table; content: ""; } +hr::after, .rule::after { display: table; clear: both; content: ""; } + +table { border-spacing: 0; border-collapse: collapse; } + +td, th { padding: 0; } + +button { cursor: pointer; border-radius: 0; } + +[hidden][hidden] { display: none !important; } + +details summary { cursor: pointer; } +details:not([open]) > *:not(summary) { display: none !important; } + +h1, h2, h3, h4, h5, h6 { margin-top: 0; margin-bottom: 0; } + +h1 { font-size: 32px; font-weight: 600; } + +h2 { font-size: 24px; font-weight: 600; } + +h3 { font-size: 20px; font-weight: 600; } + +h4 { font-size: 16px; font-weight: 600; } + +h5 { font-size: 14px; font-weight: 600; } + +h6 { font-size: 12px; font-weight: 600; } + +p { margin-top: 0; margin-bottom: 10px; } + +small { font-size: 90%; } + +blockquote { margin: 0; } + +ul, ol { padding-left: 0; margin-top: 0; margin-bottom: 0; } + +ol ol, ul ol { list-style-type: lower-roman; } + +ul ul ol, ul ol ol, ol ul ol, ol ol ol { list-style-type: lower-alpha; } + +dd { margin-left: 0; } + +tt, code { font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; font-size: 12px; } + +pre { margin-top: 0; margin-bottom: 0; font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; font-size: 12px; } + +.octicon { vertical-align: text-bottom; } + +/* Fade in an element */ +.anim-fade-in { animation-name: fade-in; animation-duration: 1s; animation-timing-function: ease-in-out; } +.anim-fade-in.fast { animation-duration: 300ms; } + +@keyframes fade-in { 0% { opacity: 0; } + 100% { opacity: 1; } } +/* Fade out an element */ +.anim-fade-out { animation-name: fade-out; animation-duration: 1s; animation-timing-function: ease-out; } +.anim-fade-out.fast { animation-duration: 0.3s; } + +@keyframes fade-out { 0% { opacity: 1; } + 100% { opacity: 0; } } +/* Fade in and slide up an element */ +.anim-fade-up { opacity: 0; animation-name: fade-up; animation-duration: 0.3s; animation-fill-mode: forwards; 
animation-timing-function: ease-out; animation-delay: 1s; } + +@keyframes fade-up { 0% { opacity: 0.8; transform: translateY(100%); } + 100% { opacity: 1; transform: translateY(0); } } +/* Fade an element out and slide down */ +.anim-fade-down { animation-name: fade-down; animation-duration: 0.3s; animation-fill-mode: forwards; animation-timing-function: ease-in; } + +@keyframes fade-down { 0% { opacity: 1; transform: translateY(0); } + 100% { opacity: 0.5; transform: translateY(100%); } } +/* Grow an element width from 0 to 100% */ +.anim-grow-x { width: 0%; animation-name: grow-x; animation-duration: 0.3s; animation-fill-mode: forwards; animation-timing-function: ease; animation-delay: 0.5s; } + +@keyframes grow-x { to { width: 100%; } } +/* Shrink an element from 100% to 0% */ +.anim-shrink-x { animation-name: shrink-x; animation-duration: 0.3s; animation-fill-mode: forwards; animation-timing-function: ease-in-out; animation-delay: 0.5s; } + +@keyframes shrink-x { to { width: 0%; } } +/* Fade in an element and scale it fast */ +.anim-scale-in { animation-name: scale-in; animation-duration: 0.15s; animation-timing-function: cubic-bezier(0.2, 0, 0.13, 1.5); } + +@keyframes scale-in { 0% { opacity: 0; transform: scale(0.5); } + 100% { opacity: 1; transform: scale(1); } } +/* Pulse an element's opacity */ +.anim-pulse { animation-name: pulse; animation-duration: 2s; animation-timing-function: linear; animation-iteration-count: infinite; } + +@keyframes pulse { 0% { opacity: 0.3; } + 10% { opacity: 1; } + 100% { opacity: 0.3; } } +/* Pulse in an element */ +.anim-pulse-in { animation-name: pulse-in; animation-duration: 0.5s; } + +@keyframes pulse-in { 0% { transform: scale3d(1, 1, 1); } + 50% { transform: scale3d(1.1, 1.1, 1.1); } + 100% { transform: scale3d(1, 1, 1); } } +/* Increase scale of an element on hover */ +.hover-grow { transition: transform 0.3s; backface-visibility: hidden; } +.hover-grow:hover { transform: scale(1.025); } + +/* Add a gray border on all sides */ +.border { border: 1px #e1e4e8 solid !important; } + +/* Add a gray border to the left and right */ +.border-y { border-top: 1px #e1e4e8 solid !important; border-bottom: 1px #e1e4e8 solid !important; } + +/* Remove borders from all sides */ +.border-0 { border: 0 !important; } + +.border-dashed { border-style: dashed !important; } + +/* Use with .border to turn the border blue */ +.border-blue { border-color: #0366d6 !important; } + +/* Use with .border to turn the border blue-light */ +.border-blue-light { border-color: #c8e1ff !important; } + +/* Use with .border to turn the border green */ +.border-green { border-color: #34d058 !important; } + +/* Use with .border to turn the border green light */ +.border-green-light { border-color: #a2cbac !important; } + +/* Use with .border to turn the border red */ +.border-red { border-color: #d73a49 !important; } + +/* Use with .border to turn the border red-light */ +.border-red-light { border-color: #cea0a5 !important; } + +/* Use with .border to turn the border purple */ +.border-purple { border-color: #6f42c1 !important; } + +/* Use with .border to turn the border yellow */ +.border-yellow { border-color: #d9d0a5 !important; } + +/* Use with .border to turn the border gray-light */ +.border-gray-light { border-color: #eaecef !important; } + +/* Use with .border to turn the border gray-dark */ +.border-gray-dark { border-color: #d1d5da !important; } + +/* Use with .border to turn the border rgba black 0.15 */ +.border-black-fade { border-color: rgba(27, 31, 35, 0.15) 
!important; } + +/* Add a gray border */ +/* Add a gray border to the top */ +.border-top { border-top: 1px #e1e4e8 solid !important; } + +/* Add a gray border to the right */ +.border-right { border-right: 1px #e1e4e8 solid !important; } + +/* Add a gray border to the bottom */ +.border-bottom { border-bottom: 1px #e1e4e8 solid !important; } + +/* Add a gray border to the left */ +.border-left { border-left: 1px #e1e4e8 solid !important; } + +/* Remove the top border */ +.border-top-0 { border-top: 0 !important; } + +/* Remove the right border */ +.border-right-0 { border-right: 0 !important; } + +/* Remove the bottom border */ +.border-bottom-0 { border-bottom: 0 !important; } + +/* Remove the left border */ +.border-left-0 { border-left: 0 !important; } + +/* Remove the border-radius */ +.rounded-0 { border-radius: 0 !important; } + +/* Add a border-radius to all corners */ +.rounded-1 { border-radius: 3px !important; } + +/* Add a 2x border-radius to all corners */ +.rounded-2 { border-radius: 6px !important; } + +.rounded-top-0 { border-top-left-radius: 0 !important; border-top-right-radius: 0 !important; } + +.rounded-top-1 { border-top-left-radius: 3px !important; border-top-right-radius: 3px !important; } + +.rounded-top-2 { border-top-left-radius: 6px !important; border-top-right-radius: 6px !important; } + +.rounded-right-0 { border-top-right-radius: 0 !important; border-bottom-right-radius: 0 !important; } + +.rounded-right-1 { border-top-right-radius: 3px !important; border-bottom-right-radius: 3px !important; } + +.rounded-right-2 { border-top-right-radius: 6px !important; border-bottom-right-radius: 6px !important; } + +.rounded-bottom-0 { border-bottom-right-radius: 0 !important; border-bottom-left-radius: 0 !important; } + +.rounded-bottom-1 { border-bottom-right-radius: 3px !important; border-bottom-left-radius: 3px !important; } + +.rounded-bottom-2 { border-bottom-right-radius: 6px !important; border-bottom-left-radius: 6px !important; } + +.rounded-left-0 { border-bottom-left-radius: 0 !important; border-top-left-radius: 0 !important; } + +.rounded-left-1 { border-bottom-left-radius: 3px !important; border-top-left-radius: 3px !important; } + +.rounded-left-2 { border-bottom-left-radius: 6px !important; border-top-left-radius: 6px !important; } + +@media (min-width: 544px) { /* Add a gray border */ + /* Add a gray border to the top */ + .border-sm-top { border-top: 1px #e1e4e8 solid !important; } + /* Add a gray border to the right */ + .border-sm-right { border-right: 1px #e1e4e8 solid !important; } + /* Add a gray border to the bottom */ + .border-sm-bottom { border-bottom: 1px #e1e4e8 solid !important; } + /* Add a gray border to the left */ + .border-sm-left { border-left: 1px #e1e4e8 solid !important; } + /* Remove the top border */ + .border-sm-top-0 { border-top: 0 !important; } + /* Remove the right border */ + .border-sm-right-0 { border-right: 0 !important; } + /* Remove the bottom border */ + .border-sm-bottom-0 { border-bottom: 0 !important; } + /* Remove the left border */ + .border-sm-left-0 { border-left: 0 !important; } + /* Remove the border-radius */ + .rounded-sm-0 { border-radius: 0 !important; } + /* Add a border-radius to all corners */ + .rounded-sm-1 { border-radius: 3px !important; } + /* Add a 2x border-radius to all corners */ + .rounded-sm-2 { border-radius: 6px !important; } + .rounded-sm-top-0 { border-top-left-radius: 0 !important; border-top-right-radius: 0 !important; } + .rounded-sm-top-1 { border-top-left-radius: 3px !important; 
border-top-right-radius: 3px !important; } + .rounded-sm-top-2 { border-top-left-radius: 6px !important; border-top-right-radius: 6px !important; } + .rounded-sm-right-0 { border-top-right-radius: 0 !important; border-bottom-right-radius: 0 !important; } + .rounded-sm-right-1 { border-top-right-radius: 3px !important; border-bottom-right-radius: 3px !important; } + .rounded-sm-right-2 { border-top-right-radius: 6px !important; border-bottom-right-radius: 6px !important; } + .rounded-sm-bottom-0 { border-bottom-right-radius: 0 !important; border-bottom-left-radius: 0 !important; } + .rounded-sm-bottom-1 { border-bottom-right-radius: 3px !important; border-bottom-left-radius: 3px !important; } + .rounded-sm-bottom-2 { border-bottom-right-radius: 6px !important; border-bottom-left-radius: 6px !important; } + .rounded-sm-left-0 { border-bottom-left-radius: 0 !important; border-top-left-radius: 0 !important; } + .rounded-sm-left-1 { border-bottom-left-radius: 3px !important; border-top-left-radius: 3px !important; } + .rounded-sm-left-2 { border-bottom-left-radius: 6px !important; border-top-left-radius: 6px !important; } } +@media (min-width: 768px) { /* Add a gray border */ + /* Add a gray border to the top */ + .border-md-top { border-top: 1px #e1e4e8 solid !important; } + /* Add a gray border to the right */ + .border-md-right { border-right: 1px #e1e4e8 solid !important; } + /* Add a gray border to the bottom */ + .border-md-bottom { border-bottom: 1px #e1e4e8 solid !important; } + /* Add a gray border to the left */ + .border-md-left { border-left: 1px #e1e4e8 solid !important; } + /* Remove the top border */ + .border-md-top-0 { border-top: 0 !important; } + /* Remove the right border */ + .border-md-right-0 { border-right: 0 !important; } + /* Remove the bottom border */ + .border-md-bottom-0 { border-bottom: 0 !important; } + /* Remove the left border */ + .border-md-left-0 { border-left: 0 !important; } + /* Remove the border-radius */ + .rounded-md-0 { border-radius: 0 !important; } + /* Add a border-radius to all corners */ + .rounded-md-1 { border-radius: 3px !important; } + /* Add a 2x border-radius to all corners */ + .rounded-md-2 { border-radius: 6px !important; } + .rounded-md-top-0 { border-top-left-radius: 0 !important; border-top-right-radius: 0 !important; } + .rounded-md-top-1 { border-top-left-radius: 3px !important; border-top-right-radius: 3px !important; } + .rounded-md-top-2 { border-top-left-radius: 6px !important; border-top-right-radius: 6px !important; } + .rounded-md-right-0 { border-top-right-radius: 0 !important; border-bottom-right-radius: 0 !important; } + .rounded-md-right-1 { border-top-right-radius: 3px !important; border-bottom-right-radius: 3px !important; } + .rounded-md-right-2 { border-top-right-radius: 6px !important; border-bottom-right-radius: 6px !important; } + .rounded-md-bottom-0 { border-bottom-right-radius: 0 !important; border-bottom-left-radius: 0 !important; } + .rounded-md-bottom-1 { border-bottom-right-radius: 3px !important; border-bottom-left-radius: 3px !important; } + .rounded-md-bottom-2 { border-bottom-right-radius: 6px !important; border-bottom-left-radius: 6px !important; } + .rounded-md-left-0 { border-bottom-left-radius: 0 !important; border-top-left-radius: 0 !important; } + .rounded-md-left-1 { border-bottom-left-radius: 3px !important; border-top-left-radius: 3px !important; } + .rounded-md-left-2 { border-bottom-left-radius: 6px !important; border-top-left-radius: 6px !important; } } +@media (min-width: 1012px) { /* Add a 
gray border */ + /* Add a gray border to the top */ + .border-lg-top { border-top: 1px #e1e4e8 solid !important; } + /* Add a gray border to the right */ + .border-lg-right { border-right: 1px #e1e4e8 solid !important; } + /* Add a gray border to the bottom */ + .border-lg-bottom { border-bottom: 1px #e1e4e8 solid !important; } + /* Add a gray border to the left */ + .border-lg-left { border-left: 1px #e1e4e8 solid !important; } + /* Remove the top border */ + .border-lg-top-0 { border-top: 0 !important; } + /* Remove the right border */ + .border-lg-right-0 { border-right: 0 !important; } + /* Remove the bottom border */ + .border-lg-bottom-0 { border-bottom: 0 !important; } + /* Remove the left border */ + .border-lg-left-0 { border-left: 0 !important; } + /* Remove the border-radius */ + .rounded-lg-0 { border-radius: 0 !important; } + /* Add a border-radius to all corners */ + .rounded-lg-1 { border-radius: 3px !important; } + /* Add a 2x border-radius to all corners */ + .rounded-lg-2 { border-radius: 6px !important; } + .rounded-lg-top-0 { border-top-left-radius: 0 !important; border-top-right-radius: 0 !important; } + .rounded-lg-top-1 { border-top-left-radius: 3px !important; border-top-right-radius: 3px !important; } + .rounded-lg-top-2 { border-top-left-radius: 6px !important; border-top-right-radius: 6px !important; } + .rounded-lg-right-0 { border-top-right-radius: 0 !important; border-bottom-right-radius: 0 !important; } + .rounded-lg-right-1 { border-top-right-radius: 3px !important; border-bottom-right-radius: 3px !important; } + .rounded-lg-right-2 { border-top-right-radius: 6px !important; border-bottom-right-radius: 6px !important; } + .rounded-lg-bottom-0 { border-bottom-right-radius: 0 !important; border-bottom-left-radius: 0 !important; } + .rounded-lg-bottom-1 { border-bottom-right-radius: 3px !important; border-bottom-left-radius: 3px !important; } + .rounded-lg-bottom-2 { border-bottom-right-radius: 6px !important; border-bottom-left-radius: 6px !important; } + .rounded-lg-left-0 { border-bottom-left-radius: 0 !important; border-top-left-radius: 0 !important; } + .rounded-lg-left-1 { border-bottom-left-radius: 3px !important; border-top-left-radius: 3px !important; } + .rounded-lg-left-2 { border-bottom-left-radius: 6px !important; border-top-left-radius: 6px !important; } } +@media (min-width: 1280px) { /* Add a gray border */ + /* Add a gray border to the top */ + .border-xl-top { border-top: 1px #e1e4e8 solid !important; } + /* Add a gray border to the right */ + .border-xl-right { border-right: 1px #e1e4e8 solid !important; } + /* Add a gray border to the bottom */ + .border-xl-bottom { border-bottom: 1px #e1e4e8 solid !important; } + /* Add a gray border to the left */ + .border-xl-left { border-left: 1px #e1e4e8 solid !important; } + /* Remove the top border */ + .border-xl-top-0 { border-top: 0 !important; } + /* Remove the right border */ + .border-xl-right-0 { border-right: 0 !important; } + /* Remove the bottom border */ + .border-xl-bottom-0 { border-bottom: 0 !important; } + /* Remove the left border */ + .border-xl-left-0 { border-left: 0 !important; } + /* Remove the border-radius */ + .rounded-xl-0 { border-radius: 0 !important; } + /* Add a border-radius to all corners */ + .rounded-xl-1 { border-radius: 3px !important; } + /* Add a 2x border-radius to all corners */ + .rounded-xl-2 { border-radius: 6px !important; } + .rounded-xl-top-0 { border-top-left-radius: 0 !important; border-top-right-radius: 0 !important; } + .rounded-xl-top-1 { 
border-top-left-radius: 3px !important; border-top-right-radius: 3px !important; } + .rounded-xl-top-2 { border-top-left-radius: 6px !important; border-top-right-radius: 6px !important; } + .rounded-xl-right-0 { border-top-right-radius: 0 !important; border-bottom-right-radius: 0 !important; } + .rounded-xl-right-1 { border-top-right-radius: 3px !important; border-bottom-right-radius: 3px !important; } + .rounded-xl-right-2 { border-top-right-radius: 6px !important; border-bottom-right-radius: 6px !important; } + .rounded-xl-bottom-0 { border-bottom-right-radius: 0 !important; border-bottom-left-radius: 0 !important; } + .rounded-xl-bottom-1 { border-bottom-right-radius: 3px !important; border-bottom-left-radius: 3px !important; } + .rounded-xl-bottom-2 { border-bottom-right-radius: 6px !important; border-bottom-left-radius: 6px !important; } + .rounded-xl-left-0 { border-bottom-left-radius: 0 !important; border-top-left-radius: 0 !important; } + .rounded-xl-left-1 { border-bottom-left-radius: 3px !important; border-top-left-radius: 3px !important; } + .rounded-xl-left-2 { border-bottom-left-radius: 6px !important; border-top-left-radius: 6px !important; } } +/* Add a 50% border-radius to make something into a circle */ +.circle { border-radius: 50% !important; } + +.box-shadow { box-shadow: 0 1px 1px rgba(27, 31, 35, 0.1) !important; } + +.box-shadow-medium { box-shadow: 0 1px 5px rgba(27, 31, 35, 0.15) !important; } + +.box-shadow-large { box-shadow: 0 1px 15px rgba(27, 31, 35, 0.15) !important; } + +.box-shadow-extra-large { box-shadow: 0 10px 50px rgba(27, 31, 35, 0.07) !important; } + +.box-shadow-none { box-shadow: none !important; } + +/* Set the background to $bg-white */ +.bg-white { background-color: #fff !important; } + +/* Set the background to $bg-blue */ +.bg-blue { background-color: #0366d6 !important; } + +/* Set the background to $bg-blue-light */ +.bg-blue-light { background-color: #f1f8ff !important; } + +/* Set the background to $bg-gray-dark */ +.bg-gray-dark { background-color: #24292e !important; } + +/* Set the background to $bg-gray */ +.bg-gray { background-color: #f6f8fa !important; } + +/* Set the background to $bg-gray-light */ +.bg-gray-light { background-color: #fafbfc !important; } + +/* Set the background to $bg-green */ +.bg-green { background-color: #28a745 !important; } + +/* Set the background to $bg-green-light */ +.bg-green-light { background-color: #dcffe4 !important; } + +/* Set the background to $bg-red */ +.bg-red { background-color: #d73a49 !important; } + +/* Set the background to $bg-red-light */ +.bg-red-light { background-color: #ffdce0 !important; } + +/* Set the background to $bg-yellow */ +.bg-yellow { background-color: #ffd33d !important; } + +/* Set the background to $bg-yellow-light */ +.bg-yellow-light { background-color: #fff5b1 !important; } + +/* Set the background to $bg-purple */ +.bg-purple { background-color: #6f42c1 !important; } + +/* Set the background to $bg-purple-light */ +.bg-purple-light { background-color: #f5f0ff !important; } + +.bg-shade-gradient { background-image: linear-gradient(180deg, rgba(27, 31, 35, 0.065), rgba(27, 31, 35, 0)) !important; background-repeat: no-repeat !important; background-size: 100% 200px !important; } + +/* Set the text color to $text-blue */ +.text-blue { color: #0366d6 !important; } + +/* Set the text color to $text-red */ +.text-red { color: #cb2431 !important; } + +/* Set the text color to $text-gray-light */ +.text-gray-light { color: #6a737d !important; } + +/* Set the text color to 
$text-gray */ +.text-gray { color: #586069 !important; } + +/* Set the text color to $text-gray-dark */ +.text-gray-dark { color: #24292e !important; } + +/* Set the text color to $text-green */ +.text-green { color: #28a745 !important; } + +/* Set the text color to $text-orange */ +.text-orange { color: #a04100 !important; } + +/* Set the text color to $text-orange-light */ +.text-orange-light { color: #e36209 !important; } + +/* Set the text color to $text-purple */ +.text-purple { color: #6f42c1 !important; } + +/* Set the text color to $text-white */ +.text-white { color: #fff !important; } + +/* Set the text color to inherit */ +.text-inherit { color: inherit !important; } + +.text-pending { color: #b08800 !important; } + +.bg-pending { color: #dbab09 !important; } + +.link-gray { color: #586069 !important; } +.link-gray:hover { color: #0366d6 !important; } + +.link-gray-dark { color: #24292e !important; } +.link-gray-dark:hover { color: #0366d6 !important; } + +/* Set the link color to $text-blue on hover Useful when you want only part of a link to turn blue on hover */ +.link-hover-blue:hover { color: #0366d6 !important; } + +/* Make a link $text-gray, then $text-blue on hover and removes the underline */ +.muted-link { color: #586069 !important; } +.muted-link:hover { color: #0366d6 !important; text-decoration: none; } + +.details-overlay[open] > summary::before { position: fixed; top: 0; right: 0; bottom: 0; left: 0; z-index: 80; display: block; cursor: default; content: " "; background: transparent; } + +.details-overlay-dark[open] > summary::before { z-index: 99; background: rgba(27, 31, 35, 0.5); } + +.flex-row { flex-direction: row !important; } + +.flex-row-reverse { flex-direction: row-reverse !important; } + +.flex-column { flex-direction: column !important; } + +.flex-wrap { flex-wrap: wrap !important; } + +.flex-nowrap { flex-wrap: nowrap !important; } + +.flex-justify-start { justify-content: flex-start !important; } + +.flex-justify-end { justify-content: flex-end !important; } + +.flex-justify-center { justify-content: center !important; } + +.flex-justify-between { justify-content: space-between !important; } + +.flex-justify-around { justify-content: space-around !important; } + +.flex-items-start { align-items: flex-start !important; } + +.flex-items-end { align-items: flex-end !important; } + +.flex-items-center { align-items: center !important; } + +.flex-items-baseline { align-items: baseline !important; } + +.flex-items-stretch { align-items: stretch !important; } + +.flex-content-start { align-content: flex-start !important; } + +.flex-content-end { align-content: flex-end !important; } + +.flex-content-center { align-content: center !important; } + +.flex-content-between { align-content: space-between !important; } + +.flex-content-around { align-content: space-around !important; } + +.flex-content-stretch { align-content: stretch !important; } + +.flex-auto { flex: 1 1 auto !important; } + +.flex-shrink-0 { flex-shrink: 0 !important; } + +.flex-self-auto { align-self: auto !important; } + +.flex-self-start { align-self: flex-start !important; } + +.flex-self-end { align-self: flex-end !important; } + +.flex-self-center { align-self: center !important; } + +.flex-self-baseline { align-self: baseline !important; } + +.flex-self-stretch { align-self: stretch !important; } + +.flex-item-equal { flex-grow: 1; flex-basis: 0; } + +@media (min-width: 544px) { .flex-sm-row { flex-direction: row !important; } + .flex-sm-row-reverse { flex-direction: row-reverse 
!important; }
[assets/css/style.css, hunk continued: a long machine-generated run of single-purpose utility classes whose diff line breaks were lost; collapsed here for readability. In order, the added lines define:
 - responsive flex helpers (.flex-{sm,md,lg,xl}-row/-row-reverse/-column/-wrap/-nowrap, plus -justify-*, -items-*, -content-*, -self-*, -auto, -shrink-0, and -item-equal variants), each set repeated inside min-width media queries at 544px, 768px, 1012px, and 1280px;
 - position helpers (.position-static/-relative/-absolute/-fixed, .top-0, .right-0, .bottom-0, .left-0), vertical-align helpers (.v-align-middle/-top/-bottom/-text-top/-text-bottom/-baseline), overflow helpers (.overflow-hidden/-scroll/-auto), a .clearfix, and float helpers (.float-left/-right/-none) with -sm/-md/-lg/-xl copies;
 - sizing and direction helpers (.width-fit, .width-full, .height-fit, .height-full, .min-width-0, .direction-rtl/.direction-ltr and their responsive copies);
 - a margin scale .m/.mt/.mr/.mb/.ml/.mx/.my-{0..6} stepping 0/4/8/16/24/32/40px, with negative variants (.mt-n1 through .ml-n6), .mx-auto, and a full copy of the scale under each breakpoint prefix (-sm-, -md-, -lg-, -xl-);
 - the matching padding scale .p/.pt/.pr/.pb/.pl/.px/.py-{0..6} on the same pixel steps, likewise repeated per breakpoint (the run continues past this point).
Every declaration carries !important, and the doc comments still contain unexpanded "$size"/"$breakpoint" placeholders, indicating the block was compiled from SCSS loops.]
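The unexpanded "$size" and "$breakpoint" placeholders above suggest the file was compiled from SCSS loops over a spacing map and a breakpoint map. A minimal sketch of that generation pattern follows; the map and variable names are illustrative assumptions, not this stylesheet's actual source:

// Illustrative SCSS sketch -- assumed names, not the real source of style.css.
// Spacing scale matching the compiled output: steps 0-6 -> 0/4/8/16/24/32/40px.
$spacer-scale: (0: 0, 1: 4px, 2: 8px, 3: 16px, 4: 24px, 5: 32px, 6: 40px);
$breakpoints:  (sm: 544px, md: 768px, lg: 1012px, xl: 1280px);

// Base (unprefixed) utilities: one block of classes per step in the scale.
@each $step, $size in $spacer-scale {
  .mt-#{$step} { margin-top: $size !important; }
  .mx-#{$step} { margin-right: $size !important; margin-left: $size !important; }
  // Negative variants are emitted for every step except 0.
  @if $step != 0 {
    .mt-n#{$step} { margin-top: -$size !important; }
  }
}

// Responsive copies: the same loop re-run inside each min-width query,
// splicing the breakpoint name into the class (.mt-md-2, .px-lg-0, ...).
@each $bp, $min in $breakpoints {
  @media (min-width: $min) {
    @each $step, $size in $spacer-scale {
      .mt-#{$bp}-#{$step} { margin-top: $size !important; }
      // ...and likewise for the other sides and the padding scale.
    }
  }
}

The blanket !important is the usual trade-off for utility classes of this kind: it lets a single-purpose class win over component styles regardless of where the compiled sheet lands in the cascade.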
*/ + .pr-md-6 { padding-right: 40px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-md-6 { padding-bottom: 40px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-md-6 { padding-left: 40px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-md-6 { padding-right: 40px !important; padding-left: 40px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-md-6 { padding-top: 40px !important; padding-bottom: 40px !important; } } +@media (min-width: 1012px) { /* Set a $size padding to all sides at $breakpoint */ + .p-lg-0 { padding: 0 !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-0 { padding-top: 0 !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-0 { padding-right: 0 !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-0 { padding-bottom: 0 !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-0 { padding-left: 0 !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-0 { padding-right: 0 !important; padding-left: 0 !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-0 { padding-top: 0 !important; padding-bottom: 0 !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-1 { padding: 4px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-1 { padding-top: 4px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-1 { padding-right: 4px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-1 { padding-bottom: 4px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-1 { padding-left: 4px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-1 { padding-right: 4px !important; padding-left: 4px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-1 { padding-top: 4px !important; padding-bottom: 4px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-2 { padding: 8px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-2 { padding-top: 8px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-2 { padding-right: 8px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-2 { padding-bottom: 8px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-2 { padding-left: 8px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-2 { padding-right: 8px !important; padding-left: 8px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-2 { padding-top: 8px !important; padding-bottom: 8px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-3 { padding: 16px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-3 { padding-top: 16px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-3 { padding-right: 16px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-3 { padding-bottom: 16px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-3 { padding-left: 16px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-3 { padding-right: 16px !important; padding-left: 16px !important; } + /* Set a $size padding to 
the top & bottom at $breakpoint */ + .py-lg-3 { padding-top: 16px !important; padding-bottom: 16px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-4 { padding: 24px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-4 { padding-top: 24px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-4 { padding-right: 24px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-4 { padding-bottom: 24px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-4 { padding-left: 24px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-4 { padding-right: 24px !important; padding-left: 24px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-4 { padding-top: 24px !important; padding-bottom: 24px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-5 { padding: 32px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-5 { padding-top: 32px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-5 { padding-right: 32px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-5 { padding-bottom: 32px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-5 { padding-left: 32px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-5 { padding-right: 32px !important; padding-left: 32px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-5 { padding-top: 32px !important; padding-bottom: 32px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-lg-6 { padding: 40px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-lg-6 { padding-top: 40px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-lg-6 { padding-right: 40px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-lg-6 { padding-bottom: 40px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-lg-6 { padding-left: 40px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-lg-6 { padding-right: 40px !important; padding-left: 40px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-lg-6 { padding-top: 40px !important; padding-bottom: 40px !important; } } +@media (min-width: 1280px) { /* Set a $size padding to all sides at $breakpoint */ + .p-xl-0 { padding: 0 !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-0 { padding-top: 0 !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-0 { padding-right: 0 !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-0 { padding-bottom: 0 !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-0 { padding-left: 0 !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-0 { padding-right: 0 !important; padding-left: 0 !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-0 { padding-top: 0 !important; padding-bottom: 0 !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-1 { padding: 4px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-1 { padding-top: 4px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-1 { padding-right: 4px !important; } 
+ /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-1 { padding-bottom: 4px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-1 { padding-left: 4px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-1 { padding-right: 4px !important; padding-left: 4px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-1 { padding-top: 4px !important; padding-bottom: 4px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-2 { padding: 8px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-2 { padding-top: 8px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-2 { padding-right: 8px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-2 { padding-bottom: 8px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-2 { padding-left: 8px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-2 { padding-right: 8px !important; padding-left: 8px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-2 { padding-top: 8px !important; padding-bottom: 8px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-3 { padding: 16px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-3 { padding-top: 16px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-3 { padding-right: 16px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-3 { padding-bottom: 16px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-3 { padding-left: 16px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-3 { padding-right: 16px !important; padding-left: 16px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-3 { padding-top: 16px !important; padding-bottom: 16px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-4 { padding: 24px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-4 { padding-top: 24px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-4 { padding-right: 24px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-4 { padding-bottom: 24px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-4 { padding-left: 24px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-4 { padding-right: 24px !important; padding-left: 24px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-4 { padding-top: 24px !important; padding-bottom: 24px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-5 { padding: 32px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-5 { padding-top: 32px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-5 { padding-right: 32px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-5 { padding-bottom: 32px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-5 { padding-left: 32px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-5 { padding-right: 32px !important; padding-left: 32px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-5 { 
padding-top: 32px !important; padding-bottom: 32px !important; } + /* Set a $size padding to all sides at $breakpoint */ + .p-xl-6 { padding: 40px !important; } + /* Set a $size padding to the top at $breakpoint */ + .pt-xl-6 { padding-top: 40px !important; } + /* Set a $size padding to the right at $breakpoint */ + .pr-xl-6 { padding-right: 40px !important; } + /* Set a $size padding to the bottom at $breakpoint */ + .pb-xl-6 { padding-bottom: 40px !important; } + /* Set a $size padding to the left at $breakpoint */ + .pl-xl-6 { padding-left: 40px !important; } + /* Set a $size padding to the left & right at $breakpoint */ + .px-xl-6 { padding-right: 40px !important; padding-left: 40px !important; } + /* Set a $size padding to the top & bottom at $breakpoint */ + .py-xl-6 { padding-top: 40px !important; padding-bottom: 40px !important; } } +.p-responsive { padding-right: 16px !important; padding-left: 16px !important; } +@media (min-width: 544px) { .p-responsive { padding-right: 40px !important; padding-left: 40px !important; } } +@media (min-width: 1012px) { .p-responsive { padding-right: 16px !important; padding-left: 16px !important; } } + +/* Set the font size to 26px */ +.h1 { font-size: 26px !important; } +@media (min-width: 768px) { .h1 { font-size: 32px !important; } } + +/* Set the font size to 22px */ +.h2 { font-size: 22px !important; } +@media (min-width: 768px) { .h2 { font-size: 24px !important; } } + +/* Set the font size to 18px */ +.h3 { font-size: 18px !important; } +@media (min-width: 768px) { .h3 { font-size: 20px !important; } } + +/* Set the font size to 16px */ +.h4 { font-size: 16px !important; } + +/* Set the font size to 14px */ +.h5 { font-size: 14px !important; } + +/* Set the font size to 12px */ +.h6 { font-size: 12px !important; } + +.h1, .h2, .h3, .h4, .h5, .h6 { font-weight: 600 !important; } + +/* Set the font size to 26px */ +.f1 { font-size: 26px !important; } +@media (min-width: 768px) { .f1 { font-size: 32px !important; } } + +/* Set the font size to 22px */ +.f2 { font-size: 22px !important; } +@media (min-width: 768px) { .f2 { font-size: 24px !important; } } + +/* Set the font size to 18px */ +.f3 { font-size: 18px !important; } +@media (min-width: 768px) { .f3 { font-size: 20px !important; } } + +/* Set the font size to 16px */ +.f4 { font-size: 16px !important; } +@media (min-width: 768px) { .f4 { font-size: 16px !important; } } + +/* Set the font size to 14px */ +.f5 { font-size: 14px !important; } + +/* Set the font size to 12px */ +.f6 { font-size: 12px !important; } + +/* Set the font size to 40px and weight to light */ +.f00-light { font-size: 40px !important; font-weight: 300 !important; } +@media (min-width: 768px) { .f00-light { font-size: 48px !important; } } + +/* Set the font size to 32px and weight to light */ +.f0-light { font-size: 32px !important; font-weight: 300 !important; } +@media (min-width: 768px) { .f0-light { font-size: 40px !important; } } + +/* Set the font size to 26px and weight to light */ +.f1-light { font-size: 26px !important; font-weight: 300 !important; } +@media (min-width: 768px) { .f1-light { font-size: 32px !important; } } + +/* Set the font size to 22px and weight to light */ +.f2-light { font-size: 22px !important; font-weight: 300 !important; } +@media (min-width: 768px) { .f2-light { font-size: 24px !important; } } + +/* Set the font size to 18px and weight to light */ +.f3-light { font-size: 18px !important; font-weight: 300 !important; } +@media (min-width: 768px) { .f3-light { font-size: 20px 
!important; } } + +/* Set the font size to ${#h6-size} */ +.text-small { font-size: 12px !important; } + +/* Large leading paragraphs */ +.lead { margin-bottom: 30px; font-size: 20px; font-weight: 300; color: #586069; } + +/* Set the line height to ultra condensed */ +.lh-condensed-ultra { line-height: 1 !important; } + +/* Set the line height to condensed */ +.lh-condensed { line-height: 1.25 !important; } + +/* Set the line height to default */ +.lh-default { line-height: 1.5 !important; } + +/* Set the line height to zero */ +.lh-0 { line-height: 0 !important; } + +/* Text align to the right */ +.text-right { text-align: right !important; } + +/* Text align to the left */ +.text-left { text-align: left !important; } + +/* Text align to the center */ +.text-center { text-align: center !important; } + +@media (min-width: 544px) { /* Text align to the right */ + .text-sm-right { text-align: right !important; } + /* Text align to the left */ + .text-sm-left { text-align: left !important; } + /* Text align to the center */ + .text-sm-center { text-align: center !important; } } +@media (min-width: 768px) { /* Text align to the right */ + .text-md-right { text-align: right !important; } + /* Text align to the left */ + .text-md-left { text-align: left !important; } + /* Text align to the center */ + .text-md-center { text-align: center !important; } } +@media (min-width: 1012px) { /* Text align to the right */ + .text-lg-right { text-align: right !important; } + /* Text align to the left */ + .text-lg-left { text-align: left !important; } + /* Text align to the center */ + .text-lg-center { text-align: center !important; } } +@media (min-width: 1280px) { /* Text align to the right */ + .text-xl-right { text-align: right !important; } + /* Text align to the left */ + .text-xl-left { text-align: left !important; } + /* Text align to the center */ + .text-xl-center { text-align: center !important; } } +/* Set the font weight to normal */ +.text-normal { font-weight: 400 !important; } + +/* Set the font weight to bold */ +.text-bold { font-weight: 600 !important; } + +/* Set the font to italic */ +.text-italic { font-style: italic !important; } + +/* Make text uppercase */ +.text-uppercase { text-transform: uppercase !important; } + +/* Underline text */ +.text-underline { text-decoration: underline !important; } + +/* Don't underline text */ +.no-underline { text-decoration: none !important; } + +/* Don't wrap white space */ +.no-wrap { white-space: nowrap !important; } + +/* Normal white space */ +.ws-normal { white-space: normal !important; } + +/* Allow long lines with no spaces to line break */ +.wb-break-all { word-break: break-all !important; } + +.text-emphasized { font-weight: 600; color: #24292e; } + +.list-style-none { list-style: none !important; } + +/* Add a dark text shadow */ +.text-shadow-dark { text-shadow: 0 1px 1px rgba(27, 31, 35, 0.25), 0 1px 25px rgba(27, 31, 35, 0.75); } + +/* Add a light text shadow */ +.text-shadow-light { text-shadow: 0 1px 0 rgba(255, 255, 255, 0.5); } + +/* Set to monospace font */ +.text-mono { font-family: "SFMono-Regular", Consolas, "Liberation Mono", Menlo, Courier, monospace; } + +/* Disallow user from selecting text */ +.user-select-none { user-select: none !important; } + +.d-block { display: block !important; } + +.d-flex { display: flex !important; } + +.d-inline { display: inline !important; } + +.d-inline-block { display: inline-block !important; } + +.d-inline-flex { display: inline-flex !important; } + +.d-none { display: none !important; 
} + +.d-table { display: table !important; } + +.d-table-cell { display: table-cell !important; } + +@media (min-width: 544px) { .d-sm-block { display: block !important; } + .d-sm-flex { display: flex !important; } + .d-sm-inline { display: inline !important; } + .d-sm-inline-block { display: inline-block !important; } + .d-sm-inline-flex { display: inline-flex !important; } + .d-sm-none { display: none !important; } + .d-sm-table { display: table !important; } + .d-sm-table-cell { display: table-cell !important; } } +@media (min-width: 768px) { .d-md-block { display: block !important; } + .d-md-flex { display: flex !important; } + .d-md-inline { display: inline !important; } + .d-md-inline-block { display: inline-block !important; } + .d-md-inline-flex { display: inline-flex !important; } + .d-md-none { display: none !important; } + .d-md-table { display: table !important; } + .d-md-table-cell { display: table-cell !important; } } +@media (min-width: 1012px) { .d-lg-block { display: block !important; } + .d-lg-flex { display: flex !important; } + .d-lg-inline { display: inline !important; } + .d-lg-inline-block { display: inline-block !important; } + .d-lg-inline-flex { display: inline-flex !important; } + .d-lg-none { display: none !important; } + .d-lg-table { display: table !important; } + .d-lg-table-cell { display: table-cell !important; } } +@media (min-width: 1280px) { .d-xl-block { display: block !important; } + .d-xl-flex { display: flex !important; } + .d-xl-inline { display: inline !important; } + .d-xl-inline-block { display: inline-block !important; } + .d-xl-inline-flex { display: inline-flex !important; } + .d-xl-none { display: none !important; } + .d-xl-table { display: table !important; } + .d-xl-table-cell { display: table-cell !important; } } +.v-hidden { visibility: hidden !important; } + +.v-visible { visibility: visible !important; } + +@media (max-width: 544px) { .hide-sm { display: none !important; } } +@media (min-width: 544px) and (max-width: 768px) { .hide-md { display: none !important; } } +@media (min-width: 768px) and (max-width: 1012px) { .hide-lg { display: none !important; } } +@media (min-width: 1012px) { .hide-xl { display: none !important; } } +/* Set the table-layout to fixed */ +.table-fixed { table-layout: fixed !important; } + +.sr-only { position: absolute; width: 1px; height: 1px; padding: 0; overflow: hidden; clip: rect(0, 0, 0, 0); word-wrap: normal; border: 0; } + +.show-on-focus { position: absolute; width: 1px; height: 1px; margin: 0; overflow: hidden; clip: rect(1px, 1px, 1px, 1px); } +.show-on-focus:focus { z-index: 20; width: auto; height: auto; clip: auto; } + +.container { width: 980px; margin-right: auto; margin-left: auto; } +.container::before { display: table; content: ""; } +.container::after { display: table; clear: both; content: ""; } + +.container-md { max-width: 768px; margin-right: auto; margin-left: auto; } + +.container-lg { max-width: 1012px; margin-right: auto; margin-left: auto; } + +.container-xl { max-width: 1280px; margin-right: auto; margin-left: auto; } + +.columns { margin-right: -10px; margin-left: -10px; } +.columns::before { display: table; content: ""; } +.columns::after { display: table; clear: both; content: ""; } + +.column { float: left; padding-right: 10px; padding-left: 10px; } + +.one-third { width: 33.333333%; } + +.two-thirds { width: 66.666667%; } + +.one-fourth { width: 25%; } + +.one-half { width: 50%; } + +.three-fourths { width: 75%; } + +.one-fifth { width: 20%; } + +.four-fifths { width: 80%; } 
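The utility classes above follow a mechanical pattern: a spacing scale of 0, 4, 8, 16, 24, 32, and 40px crossed with the sm (544px), md (768px), lg (1012px), and xl (1280px) breakpoints, and the .col-* classes just below are 12-column fractional widths. As a rough sketch of how such a file is typically generated, here is a minimal Primer-style SCSS loop; the names $spacers and $breakpoints are illustrative assumptions, not taken from this repository:

    @use "sass:math";

    // Assumed scales, inferred from the compiled output above.
    $spacers: (0: 0, 1: 4px, 2: 8px, 3: 16px, 4: 24px, 5: 32px, 6: 40px);
    $breakpoints: (sm: 544px, md: 768px, lg: 1012px, xl: 1280px);

    @each $bp, $min in $breakpoints {
      @media (min-width: $min) {
        @each $step, $size in $spacers {
          // One responsive padding utility per side; pr-, pb-, pl-, py- follow the same shape.
          .p-#{$bp}-#{$step} { padding: $size !important; }
          .pt-#{$bp}-#{$step} { padding-top: $size !important; }
          .px-#{$bp}-#{$step} { padding-right: $size !important; padding-left: $size !important; }
        }
        // 12-column grid fractions: .col-md-4 compiles to width: 33.3333333333%.
        @for $i from 1 through 12 {
          .col-#{$bp}-#{$i} { width: percentage(math.div($i, 12)); }
        }
      }
    }

Compiling a loop like this yields exactly the repetitive, breakpoint-wrapped blocks seen in this file, which is why the vendored CSS is long but regular.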
+ +.centered { display: block; float: none; margin-right: auto; margin-left: auto; } + +.col-1 { width: 8.3333333333%; } + +.col-2 { width: 16.6666666667%; } + +.col-3 { width: 25%; } + +.col-4 { width: 33.3333333333%; } + +.col-5 { width: 41.6666666667%; } + +.col-6 { width: 50%; } + +.col-7 { width: 58.3333333333%; } + +.col-8 { width: 66.6666666667%; } + +.col-9 { width: 75%; } + +.col-10 { width: 83.3333333333%; } + +.col-11 { width: 91.6666666667%; } + +.col-12 { width: 100%; } + +@media (min-width: 544px) { .col-sm-1 { width: 8.3333333333%; } + .col-sm-2 { width: 16.6666666667%; } + .col-sm-3 { width: 25%; } + .col-sm-4 { width: 33.3333333333%; } + .col-sm-5 { width: 41.6666666667%; } + .col-sm-6 { width: 50%; } + .col-sm-7 { width: 58.3333333333%; } + .col-sm-8 { width: 66.6666666667%; } + .col-sm-9 { width: 75%; } + .col-sm-10 { width: 83.3333333333%; } + .col-sm-11 { width: 91.6666666667%; } + .col-sm-12 { width: 100%; } } +@media (min-width: 768px) { .col-md-1 { width: 8.3333333333%; } + .col-md-2 { width: 16.6666666667%; } + .col-md-3 { width: 25%; } + .col-md-4 { width: 33.3333333333%; } + .col-md-5 { width: 41.6666666667%; } + .col-md-6 { width: 50%; } + .col-md-7 { width: 58.3333333333%; } + .col-md-8 { width: 66.6666666667%; } + .col-md-9 { width: 75%; } + .col-md-10 { width: 83.3333333333%; } + .col-md-11 { width: 91.6666666667%; } + .col-md-12 { width: 100%; } } +@media (min-width: 1012px) { .col-lg-1 { width: 8.3333333333%; } + .col-lg-2 { width: 16.6666666667%; } + .col-lg-3 { width: 25%; } + .col-lg-4 { width: 33.3333333333%; } + .col-lg-5 { width: 41.6666666667%; } + .col-lg-6 { width: 50%; } + .col-lg-7 { width: 58.3333333333%; } + .col-lg-8 { width: 66.6666666667%; } + .col-lg-9 { width: 75%; } + .col-lg-10 { width: 83.3333333333%; } + .col-lg-11 { width: 91.6666666667%; } + .col-lg-12 { width: 100%; } } +@media (min-width: 1280px) { .col-xl-1 { width: 8.3333333333%; } + .col-xl-2 { width: 16.6666666667%; } + .col-xl-3 { width: 25%; } + .col-xl-4 { width: 33.3333333333%; } + .col-xl-5 { width: 41.6666666667%; } + .col-xl-6 { width: 50%; } + .col-xl-7 { width: 58.3333333333%; } + .col-xl-8 { width: 66.6666666667%; } + .col-xl-9 { width: 75%; } + .col-xl-10 { width: 83.3333333333%; } + .col-xl-11 { width: 91.6666666667%; } + .col-xl-12 { width: 100%; } } +.gutter { margin-right: -16px; margin-left: -16px; } +.gutter > [class*="col-"] { padding-right: 16px !important; padding-left: 16px !important; } + +.gutter-condensed { margin-right: -8px; margin-left: -8px; } +.gutter-condensed > [class*="col-"] { padding-right: 8px !important; padding-left: 8px !important; } + +.gutter-spacious { margin-right: -24px; margin-left: -24px; } +.gutter-spacious > [class*="col-"] { padding-right: 24px !important; padding-left: 24px !important; } + +@media (min-width: 544px) { .gutter-sm { margin-right: -16px; margin-left: -16px; } + .gutter-sm > [class*="col-"] { padding-right: 16px !important; padding-left: 16px !important; } + .gutter-sm-condensed { margin-right: -8px; margin-left: -8px; } + .gutter-sm-condensed > [class*="col-"] { padding-right: 8px !important; padding-left: 8px !important; } + .gutter-sm-spacious { margin-right: -24px; margin-left: -24px; } + .gutter-sm-spacious > [class*="col-"] { padding-right: 24px !important; padding-left: 24px !important; } } +@media (min-width: 768px) { .gutter-md { margin-right: -16px; margin-left: -16px; } + .gutter-md > [class*="col-"] { padding-right: 16px !important; padding-left: 16px !important; } + .gutter-md-condensed { margin-right: 
-8px; margin-left: -8px; } + .gutter-md-condensed > [class*="col-"] { padding-right: 8px !important; padding-left: 8px !important; } + .gutter-md-spacious { margin-right: -24px; margin-left: -24px; } + .gutter-md-spacious > [class*="col-"] { padding-right: 24px !important; padding-left: 24px !important; } } +@media (min-width: 1012px) { .gutter-lg { margin-right: -16px; margin-left: -16px; } + .gutter-lg > [class*="col-"] { padding-right: 16px !important; padding-left: 16px !important; } + .gutter-lg-condensed { margin-right: -8px; margin-left: -8px; } + .gutter-lg-condensed > [class*="col-"] { padding-right: 8px !important; padding-left: 8px !important; } + .gutter-lg-spacious { margin-right: -24px; margin-left: -24px; } + .gutter-lg-spacious > [class*="col-"] { padding-right: 24px !important; padding-left: 24px !important; } } +@media (min-width: 1280px) { .gutter-xl { margin-right: -16px; margin-left: -16px; } + .gutter-xl > [class*="col-"] { padding-right: 16px !important; padding-left: 16px !important; } + .gutter-xl-condensed { margin-right: -8px; margin-left: -8px; } + .gutter-xl-condensed > [class*="col-"] { padding-right: 8px !important; padding-left: 8px !important; } + .gutter-xl-spacious { margin-right: -24px; margin-left: -24px; } + .gutter-xl-spacious > [class*="col-"] { padding-right: 24px !important; padding-left: 24px !important; } } +.offset-1 { margin-left: 8.3333333333% !important; } + +.offset-2 { margin-left: 16.6666666667% !important; } + +.offset-3 { margin-left: 25% !important; } + +.offset-4 { margin-left: 33.3333333333% !important; } + +.offset-5 { margin-left: 41.6666666667% !important; } + +.offset-6 { margin-left: 50% !important; } + +.offset-7 { margin-left: 58.3333333333% !important; } + +.offset-8 { margin-left: 66.6666666667% !important; } + +.offset-9 { margin-left: 75% !important; } + +.offset-10 { margin-left: 83.3333333333% !important; } + +.offset-11 { margin-left: 91.6666666667% !important; } + +@media (min-width: 544px) { .offset-sm-1 { margin-left: 8.3333333333% !important; } + .offset-sm-2 { margin-left: 16.6666666667% !important; } + .offset-sm-3 { margin-left: 25% !important; } + .offset-sm-4 { margin-left: 33.3333333333% !important; } + .offset-sm-5 { margin-left: 41.6666666667% !important; } + .offset-sm-6 { margin-left: 50% !important; } + .offset-sm-7 { margin-left: 58.3333333333% !important; } + .offset-sm-8 { margin-left: 66.6666666667% !important; } + .offset-sm-9 { margin-left: 75% !important; } + .offset-sm-10 { margin-left: 83.3333333333% !important; } + .offset-sm-11 { margin-left: 91.6666666667% !important; } } +@media (min-width: 768px) { .offset-md-1 { margin-left: 8.3333333333% !important; } + .offset-md-2 { margin-left: 16.6666666667% !important; } + .offset-md-3 { margin-left: 25% !important; } + .offset-md-4 { margin-left: 33.3333333333% !important; } + .offset-md-5 { margin-left: 41.6666666667% !important; } + .offset-md-6 { margin-left: 50% !important; } + .offset-md-7 { margin-left: 58.3333333333% !important; } + .offset-md-8 { margin-left: 66.6666666667% !important; } + .offset-md-9 { margin-left: 75% !important; } + .offset-md-10 { margin-left: 83.3333333333% !important; } + .offset-md-11 { margin-left: 91.6666666667% !important; } } +@media (min-width: 1012px) { .offset-lg-1 { margin-left: 8.3333333333% !important; } + .offset-lg-2 { margin-left: 16.6666666667% !important; } + .offset-lg-3 { margin-left: 25% !important; } + .offset-lg-4 { margin-left: 33.3333333333% !important; } + .offset-lg-5 { margin-left: 
41.6666666667% !important; } + .offset-lg-6 { margin-left: 50% !important; } + .offset-lg-7 { margin-left: 58.3333333333% !important; } + .offset-lg-8 { margin-left: 66.6666666667% !important; } + .offset-lg-9 { margin-left: 75% !important; } + .offset-lg-10 { margin-left: 83.3333333333% !important; } + .offset-lg-11 { margin-left: 91.6666666667% !important; } } +@media (min-width: 1280px) { .offset-xl-1 { margin-left: 8.3333333333% !important; } + .offset-xl-2 { margin-left: 16.6666666667% !important; } + .offset-xl-3 { margin-left: 25% !important; } + .offset-xl-4 { margin-left: 33.3333333333% !important; } + .offset-xl-5 { margin-left: 41.6666666667% !important; } + .offset-xl-6 { margin-left: 50% !important; } + .offset-xl-7 { margin-left: 58.3333333333% !important; } + .offset-xl-8 { margin-left: 66.6666666667% !important; } + .offset-xl-9 { margin-left: 75% !important; } + .offset-xl-10 { margin-left: 83.3333333333% !important; } + .offset-xl-11 { margin-left: 91.6666666667% !important; } } +.markdown-body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; font-size: 16px; line-height: 1.5; word-wrap: break-word; } +.markdown-body::before { display: table; content: ""; } +.markdown-body::after { display: table; clear: both; content: ""; } +.markdown-body > *:first-child { margin-top: 0 !important; } +.markdown-body > *:last-child { margin-bottom: 0 !important; } +.markdown-body a:not([href]) { color: inherit; text-decoration: none; } +.markdown-body .absent { color: #cb2431; } +.markdown-body .anchor { float: left; padding-right: 4px; margin-left: -20px; line-height: 1; } +.markdown-body .anchor:focus { outline: none; } +.markdown-body p, .markdown-body blockquote, .markdown-body ul, .markdown-body ol, .markdown-body dl, .markdown-body table, .markdown-body pre { margin-top: 0; margin-bottom: 16px; } +.markdown-body hr { height: 0.25em; padding: 0; margin: 24px 0; background-color: #e1e4e8; border: 0; } +.markdown-body blockquote { padding: 0 1em; color: #6a737d; border-left: 0.25em solid #dfe2e5; } +.markdown-body blockquote > :first-child { margin-top: 0; } +.markdown-body blockquote > :last-child { margin-bottom: 0; } +.markdown-body kbd { display: inline-block; padding: 3px 5px; font-size: 11px; line-height: 10px; color: #444d56; vertical-align: middle; background-color: #fafbfc; border: solid 1px #c6cbd1; border-bottom-color: #959da5; border-radius: 3px; box-shadow: inset 0 -1px 0 #959da5; } + +.markdown-body h1, .markdown-body h2, .markdown-body h3, .markdown-body h4, .markdown-body h5, .markdown-body h6 { margin-top: 24px; margin-bottom: 16px; font-weight: 600; line-height: 1.25; } +.markdown-body h1 .octicon-link, .markdown-body h2 .octicon-link, .markdown-body h3 .octicon-link, .markdown-body h4 .octicon-link, .markdown-body h5 .octicon-link, .markdown-body h6 .octicon-link { color: #1b1f23; vertical-align: middle; visibility: hidden; } +.markdown-body h1:hover .anchor, .markdown-body h2:hover .anchor, .markdown-body h3:hover .anchor, .markdown-body h4:hover .anchor, .markdown-body h5:hover .anchor, .markdown-body h6:hover .anchor { text-decoration: none; } +.markdown-body h1:hover .anchor .octicon-link, .markdown-body h2:hover .anchor .octicon-link, .markdown-body h3:hover .anchor .octicon-link, .markdown-body h4:hover .anchor .octicon-link, .markdown-body h5:hover .anchor .octicon-link, .markdown-body h6:hover .anchor .octicon-link { visibility: visible; } +.markdown-body h1 
tt, .markdown-body h1 code, .markdown-body h2 tt, .markdown-body h2 code, .markdown-body h3 tt, .markdown-body h3 code, .markdown-body h4 tt, .markdown-body h4 code, .markdown-body h5 tt, .markdown-body h5 code, .markdown-body h6 tt, .markdown-body h6 code { font-size: inherit; } +.markdown-body h1 { padding-bottom: 0.3em; font-size: 2em; border-bottom: 1px solid #eaecef; } +.markdown-body h2 { padding-bottom: 0.3em; font-size: 1.5em; border-bottom: 1px solid #eaecef; } +.markdown-body h3 { font-size: 1.25em; } +.markdown-body h4 { font-size: 1em; } +.markdown-body h5 { font-size: 0.875em; } +.markdown-body h6 { font-size: 0.85em; color: #6a737d; } + +.markdown-body ul, .markdown-body ol { padding-left: 2em; } +.markdown-body ul.no-list, .markdown-body ol.no-list { padding: 0; list-style-type: none; } +.markdown-body ul ul, .markdown-body ul ol, .markdown-body ol ol, .markdown-body ol ul { margin-top: 0; margin-bottom: 0; } +.markdown-body li { word-wrap: break-all; } +.markdown-body li > p { margin-top: 16px; } +.markdown-body li + li { margin-top: 0.25em; } +.markdown-body dl { padding: 0; } +.markdown-body dl dt { padding: 0; margin-top: 16px; font-size: 1em; font-style: italic; font-weight: 600; } +.markdown-body dl dd { padding: 0 16px; margin-bottom: 16px; } + +.markdown-body table { display: block; width: 100%; overflow: auto; } +.markdown-body table th { font-weight: 600; } +.markdown-body table th, .markdown-body table td { padding: 6px 13px; border: 1px solid #dfe2e5; } +.markdown-body table tr { background-color: #fff; border-top: 1px solid #c6cbd1; } +.markdown-body table tr:nth-child(2n) { background-color: #f6f8fa; } +.markdown-body table img { background-color: transparent; } + +.markdown-body img { max-width: 100%; box-sizing: content-box; background-color: #fff; } +.markdown-body img[align=right] { padding-left: 20px; } +.markdown-body img[align=left] { padding-right: 20px; } +.markdown-body .emoji { max-width: none; vertical-align: text-top; background-color: transparent; } +.markdown-body span.frame { display: block; overflow: hidden; } +.markdown-body span.frame > span { display: block; float: left; width: auto; padding: 7px; margin: 13px 0 0; overflow: hidden; border: 1px solid #dfe2e5; } +.markdown-body span.frame span img { display: block; float: left; } +.markdown-body span.frame span span { display: block; padding: 5px 0 0; clear: both; color: #24292e; } +.markdown-body span.align-center { display: block; overflow: hidden; clear: both; } +.markdown-body span.align-center > span { display: block; margin: 13px auto 0; overflow: hidden; text-align: center; } +.markdown-body span.align-center span img { margin: 0 auto; text-align: center; } +.markdown-body span.align-right { display: block; overflow: hidden; clear: both; } +.markdown-body span.align-right > span { display: block; margin: 13px 0 0; overflow: hidden; text-align: right; } +.markdown-body span.align-right span img { margin: 0; text-align: right; } +.markdown-body span.float-left { display: block; float: left; margin-right: 13px; overflow: hidden; } +.markdown-body span.float-left span { margin: 13px 0 0; } +.markdown-body span.float-right { display: block; float: right; margin-left: 13px; overflow: hidden; } +.markdown-body span.float-right > span { display: block; margin: 13px auto 0; overflow: hidden; text-align: right; } + +.markdown-body code, .markdown-body tt { padding: 0.2em 0.4em; margin: 0; font-size: 85%; background-color: rgba(27, 31, 35, 0.05); border-radius: 3px; } +.markdown-body code br, 
.markdown-body tt br { display: none; } +.markdown-body del code { text-decoration: inherit; } +.markdown-body pre { word-wrap: normal; } +.markdown-body pre > code { padding: 0; margin: 0; font-size: 100%; word-break: normal; white-space: pre; background: transparent; border: 0; } +.markdown-body .highlight { margin-bottom: 16px; } +.markdown-body .highlight pre { margin-bottom: 0; word-break: normal; } +.markdown-body .highlight pre, .markdown-body pre { padding: 16px; overflow: auto; font-size: 85%; line-height: 1.45; background-color: #f6f8fa; border-radius: 3px; } +.markdown-body pre code, .markdown-body pre tt { display: inline; max-width: auto; padding: 0; margin: 0; overflow: visible; line-height: inherit; word-wrap: normal; background-color: transparent; border: 0; } + +.markdown-body .csv-data td, .markdown-body .csv-data th { padding: 5px; overflow: hidden; font-size: 12px; line-height: 1; text-align: left; white-space: nowrap; } +.markdown-body .csv-data .blob-num { padding: 10px 8px 9px; text-align: right; background: #fff; border: 0; } +.markdown-body .csv-data tr { border-top: 0; } +.markdown-body .csv-data th { font-weight: 600; background: #f6f8fa; border-top: 0; } + +.highlight table td { padding: 5px; } + +.highlight table pre { margin: 0; } + +.highlight .cm { color: #999988; font-style: italic; } + +.highlight .cp { color: #999999; font-weight: bold; } + +.highlight .c1 { color: #999988; font-style: italic; } + +.highlight .cs { color: #999999; font-weight: bold; font-style: italic; } + +.highlight .c, .highlight .cd { color: #999988; font-style: italic; } + +.highlight .err { color: #a61717; background-color: #e3d2d2; } + +.highlight .gd { color: #000000; background-color: #ffdddd; } + +.highlight .ge { color: #000000; font-style: italic; } + +.highlight .gr { color: #aa0000; } + +.highlight .gh { color: #999999; } + +.highlight .gi { color: #000000; background-color: #ddffdd; } + +.highlight .go { color: #888888; } + +.highlight .gp { color: #555555; } + +.highlight .gs { font-weight: bold; } + +.highlight .gu { color: #aaaaaa; } + +.highlight .gt { color: #aa0000; } + +.highlight .kc { color: #000000; font-weight: bold; } + +.highlight .kd { color: #000000; font-weight: bold; } + +.highlight .kn { color: #000000; font-weight: bold; } + +.highlight .kp { color: #000000; font-weight: bold; } + +.highlight .kr { color: #000000; font-weight: bold; } + +.highlight .kt { color: #445588; font-weight: bold; } + +.highlight .k, .highlight .kv { color: #000000; font-weight: bold; } + +.highlight .mf { color: #009999; } + +.highlight .mh { color: #009999; } + +.highlight .il { color: #009999; } + +.highlight .mi { color: #009999; } + +.highlight .mo { color: #009999; } + +.highlight .m, .highlight .mb, .highlight .mx { color: #009999; } + +.highlight .sb { color: #d14; } + +.highlight .sc { color: #d14; } + +.highlight .sd { color: #d14; } + +.highlight .s2 { color: #d14; } + +.highlight .se { color: #d14; } + +.highlight .sh { color: #d14; } + +.highlight .si { color: #d14; } + +.highlight .sx { color: #d14; } + +.highlight .sr { color: #009926; } + +.highlight .s1 { color: #d14; } + +.highlight .ss { color: #990073; } + +.highlight .s { color: #d14; } + +.highlight .na { color: #008080; } + +.highlight .bp { color: #999999; } + +.highlight .nb { color: #0086B3; } + +.highlight .nc { color: #445588; font-weight: bold; } + +.highlight .no { color: #008080; } + +.highlight .nd { color: #3c5d5d; font-weight: bold; } + +.highlight .ni { color: #800080; } + +.highlight .ne { 
color: #990000; font-weight: bold; } + +.highlight .nf { color: #990000; font-weight: bold; } + +.highlight .nl { color: #990000; font-weight: bold; } + +.highlight .nn { color: #555555; } + +.highlight .nt { color: #000080; } + +.highlight .vc { color: #008080; } + +.highlight .vg { color: #008080; } + +.highlight .vi { color: #008080; } + +.highlight .nv { color: #008080; } + +.highlight .ow { color: #000000; font-weight: bold; } + +.highlight .o { color: #000000; font-weight: bold; } + +.highlight .w { color: #bbbbbb; } + +.highlight { background-color: #f8f8f8; } diff --git a/blog.html b/blog.html index f5791f6..52030b8 100644 --- a/blog.html +++ b/blog.html @@ -1,9 +1,146 @@ ---- -layout: default -title: Blog ---- - -
+ + + + + + + + + Robotics Group @ University of Montreal | Blog + + + + + + + + + + + +
+ + + + + + + +
@@ -13,6 +150,451 @@
-{% for post in site.posts %} - {% include news-item.html item=post %} -{% endfor %} + + + + + + + + + +
+ + + December 05, 2020 + + +
+ + Krishna won an NVIDIA fellowship for 2021-22. Congratulations! + + +
+ + +
+
+ + + + + + + + + + +
+ + + November 30, 2020 + + +
+ + We released gradslam - a differentiable dense SLAM framework for deep learning. Check it out! + + +
+ + +
+
+ + + + + + + + + + +
+ + + October 30, 2020 + + +
+ + We organized an IROS workshop on Benchmarking Progress in Autonomous Driving. + +
+ + +
+
+ + + + + + + + + + +
+ + + October 15, 2020 + + +
+ + Check out our new NeurIPS 2020 oral paper La-MAML: Look-Ahead Meta-Learning for Continual Learning [Code], [Short Video]. + +
+ + +
+
+ + + + + + + + + + +
+ + + October 10, 2020 + + +
+ + Two papers accepted to NeurIPS 2020 (one of them an oral - top 1.1%). Congratulations, Gunshi and Ruixiang! + +
+ + +
+
+ + + + + + + + + + +
+ + + September 10, 2020 + + +
+ + Robot learning seminar series launched! + + +
+ + +
+
+ + + + + + + + + + +
+ + + June 30, 2020 + + +
+ + Gunshi Gupta successfully completes her M.Sc. and joins Wayve as a deep learning researcher! + +
+ + +
+
+ + + + + + + + + + +
+ + + June 05, 2020 + + +
+ + Our paper [MapLite: Autonomous intersection navigation without detailed prior maps] was adjudged the best Robotics and Automation Letters (RAL) paper of 2019! Check it out here. And here’s a short video abstract. + +
+ + +
+
+ + + + + + + + + + +
+ + + January 20, 2020 + + +
+ + Check out gradSLAM: Dense SLAM meets automatic differentiation, our new ICRA 2020 paper on fully differentiable dense SLAM: Project page, Video. + +
+ + +
+
+ + + + + + + + + + +
+ + + September 10, 2019 + + +
+ + The “Active Domain Randomization” paper got accepted to CoRL 2019. Congrats, Bhairav, Manfred, and Florian. + +
+ + +
+
+ + + + + + + + + + +
+ + + September 01, 2019 + + +
+ + Dhaivat, Rey, and Philippe joined the group as Master’s students. Welcome! + +
+ + +
+
+ + + + + + + + + + +
+ + + September 01, 2019 + + +
+ + Sharath, Mark, Amrut, Rohan, and Dishank joined the group as interns. Welcome! + + +
+ + +
+
+ + + + + + + + + + +
+ + + August 01, 2019 + + +
+ + Our paper Deep Active Localization was accepted to Robotics and Automation Letters. + +
+ + +
+
+ + + + + + + + + + +
+ + + November 28, 2018 + + +
+ + Dhaivat Bhatt just joined our group as an intern. Welcome! + + +
+ + +
+
+ + + + + + + + + + +
+ + + September 05, 2018 + + +
+ + Manfred Diaz and Ruixiang Zhang join the group as PhD students … + + +
+ + +
+
+ + + + + + + + + + +
+ + + April 20, 2018 + + +
+ + Paper on self-supervised visual odometry estimation accepted to CVPR workshop on SLAM. + + +
+ + +
+
+ + + + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2018/04/20/cvpr.html b/blog/2018/04/20/cvpr.html new file mode 100644 index 0000000..f0d3adc --- /dev/null +++ b/blog/2018/04/20/cvpr.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Cvpr + + + + + + + + + + + +
+ + + + + + + +

April 20, 2018

+ +
+

Paper on self-supervised visual odometry estimation accepted to CVPR workshop on SLAM.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2018/09/05/welcomephd.html b/blog/2018/09/05/welcomephd.html new file mode 100644 index 0000000..c02baf7 --- /dev/null +++ b/blog/2018/09/05/welcomephd.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Welcomephd + + + + + + + + + + + +
+ + + + + + + +

September 05, 2018

+ +
+

Manfred Diaz and Ruixiang Zhang join the group as PhD students …

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2018/11/28/image.html b/blog/2018/11/28/image.html new file mode 100644 index 0000000..3a87381 --- /dev/null +++ b/blog/2018/11/28/image.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Image + + + + + + + + + + + +
+ + + + + + + +

November 28, 2018

+ +
+

Dhaivat Bhatt just joined our group as an intern. Welcome!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2019/08/01/dal.html b/blog/2019/08/01/dal.html new file mode 100644 index 0000000..160cf6e --- /dev/null +++ b/blog/2019/08/01/dal.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Dal + + + + + + + + + + + +
+ + + + + + + +

August 01, 2019

+ +
+

Our paper Deep Active Localization was accepted to Robotics and Automation Letters.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2019/09/01/welcomeinterns.html b/blog/2019/09/01/welcomeinterns.html new file mode 100644 index 0000000..d4cc137 --- /dev/null +++ b/blog/2019/09/01/welcomeinterns.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Welcomeinterns + + + + + + + + + + + +
+ + + + + + + +

September 01, 2019

+ +
+

Sharath, Mark, Amrut, Rohan, and Dishank joined the group as interns. Welcome!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2019/09/01/welcomestudents.html b/blog/2019/09/01/welcomestudents.html new file mode 100644 index 0000000..b11963e --- /dev/null +++ b/blog/2019/09/01/welcomestudents.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Welcomestudents + + + + + + + + + + + +
+ + + + + + + +

September 01, 2019

+ +
+

Dhaivat, Rey, and Philippe joined the group as Master’s students. Welcome!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2019/09/10/adrpaper.html b/blog/2019/09/10/adrpaper.html new file mode 100644 index 0000000..310a19f --- /dev/null +++ b/blog/2019/09/10/adrpaper.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Adrpaper + + + + + + + + + + + +
+ + + + + + + +

September 10, 2019

+ +
+

The “Active Domain Randomization” paper got accepted to CoRL 2019. Congrats, Bhairav, Manfred, and Florian.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/01/20/gradslam-icra.html b/blog/2020/01/20/gradslam-icra.html new file mode 100644 index 0000000..1578ff5 --- /dev/null +++ b/blog/2020/01/20/gradslam-icra.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Gradslam Icra + + + + + + + + + + + +
+ + + + + + + +

January 20, 2020

+ +
+

Check out gradSLAM: Dense SLAM meets automatic differentiation, our new ICRA 2020 paper on fully differentiable dense SLAM: Project page, Video.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/06/05/maplite-award.html b/blog/2020/06/05/maplite-award.html new file mode 100644 index 0000000..f4a8ef4 --- /dev/null +++ b/blog/2020/06/05/maplite-award.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Maplite Award + + + + + + + + + + + +
+ + + + + + + +

June 05, 2020

+ +
+

Our paper [MapLite: Autonomous intersection navigation without detailed prior maps] was adjudged the best Robotics and Automation Letters (RAL) paper of 2019! Check it out here. And here’s a short video abstract.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/06/30/gunshi-graduates.html b/blog/2020/06/30/gunshi-graduates.html new file mode 100644 index 0000000..40f1a8e --- /dev/null +++ b/blog/2020/06/30/gunshi-graduates.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Gunshi Graduates + + + + + + + + + + + +
+ + + + + + + +

June 30, 2020

+ +
+

Gunshi Gupta successfully completes her M.Sc. and joins Wayve as a deep learning researcher!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/09/10/robotlearningseries.html b/blog/2020/09/10/robotlearningseries.html new file mode 100644 index 0000000..d0b8de4 --- /dev/null +++ b/blog/2020/09/10/robotlearningseries.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Robotlearningseries + + + + + + + + + + + +
+ + + + + + + +

September 10, 2020

+ +
+

Robot learning seminar series launched!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/10/10/neurips.html b/blog/2020/10/10/neurips.html new file mode 100644 index 0000000..690c4da --- /dev/null +++ b/blog/2020/10/10/neurips.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Neurips + + + + + + + + + + + +
+ + + + + + + +

October 10, 2020

+ +
+

Two papers accepted to NeurIPS 2020 (one of them an oral - top 1.1%). Congratulations, Gunshi and Ruixiang!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/10/15/lamaml.html b/blog/2020/10/15/lamaml.html new file mode 100644 index 0000000..925f67f --- /dev/null +++ b/blog/2020/10/15/lamaml.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Lamaml + + + + + + + + + + + +
+ + + + + + + +

October 15, 2020

+ +
+

Check out our new NeurIPS 2020 oral paper La-MAML: Look-Ahead Meta-Learning for Continual Learning [Code], [Short Video].

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/10/30/irosworkshop.html b/blog/2020/10/30/irosworkshop.html new file mode 100644 index 0000000..03056e3 --- /dev/null +++ b/blog/2020/10/30/irosworkshop.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Irosworkshop + + + + + + + + + + + +
+ + + + + + + +

October 30, 2020

+ +
+

We organized an IROS workshop on Benchmarking Progress in Autonomous Driving.

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/11/30/gradslam.html b/blog/2020/11/30/gradslam.html new file mode 100644 index 0000000..7787d65 --- /dev/null +++ b/blog/2020/11/30/gradslam.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Gradslam + + + + + + + + + + + +
+ + + + + + + +

November 30, 2020

+ +
+

We released gradslam - a differentiable dense SLAM framework for deep learning. Check it out!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/blog/2020/12/05/krishna-fellowship.html b/blog/2020/12/05/krishna-fellowship.html new file mode 100644 index 0000000..326a521 --- /dev/null +++ b/blog/2020/12/05/krishna-fellowship.html @@ -0,0 +1,179 @@ + + + + + + + + + Robotics Group @ University of Montreal | Krishna Fellowship + + + + + + + + + + + +
+ + + + + + + +

December 05, 2020

+ +
+

Krishna won an NVIDIA fellowship for 2021-22. Congratulations!

+ +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/code.html b/code.html new file mode 100644 index 0000000..df0a553 --- /dev/null +++ b/code.html @@ -0,0 +1,177 @@ + + + + + + + + + Robotics Group @ University of Montreal | Code + + + + + + + + + + + +
+ + + + + + + +

Please see our published articles for their corresponding code repos. In general, our repositories can be found here:

+ +

https://github.com/montrealrobotics

+ + + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/code.md b/code.md deleted file mode 100644 index bd6e76c..0000000 --- a/code.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -layout: default -title: Code ---- -Please see our published articles for their corresponding code repos. In general, our repositories can be found here: - -https://github.com/montrealrobotics - diff --git a/contact.html b/contact.html new file mode 100644 index 0000000..c9dc554 --- /dev/null +++ b/contact.html @@ -0,0 +1,216 @@ + + + + + + + + + Robotics Group @ University of Montreal | Contact + + + + + + + + + + + +
+ + + + + + + +

Joining the Robotics and Embodied AI Lab

+ +
+
+
+
+
Work with us
+
At REAL, we are always happy to take in talented individuals as full-time students (for M.Sc./Ph.D. positions), or for short-term (intern/visitor) roles. If that's you, here is how to get started.
+
+
+
+
+ +
+ +
+

Full-time students (M.Sc./Ph.D.)

+
+ +
+ +

If you are looking to join an M.Sc. or a Ph.D. program at Universite de Montreal or Mila and would like to work at REAL, please apply through Mila admissions. Indicate either Liam Paull or Glen Berseth among your faculty advisors of choice.

+
+
+ +
+ +
+

Interns

+
+ +
+

+ If you are looking to intern with REAL, please fill out our internship application form. +

+

We are recruiting up to two interns for Winter 2021 (Jan-May). Please see this listing if interested.

+
+
+ + + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/contact.md b/contact.md deleted file mode 100644 index baa4364..0000000 --- a/contact.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -layout: default -title: Contact ---- - - -

Joining Robotics and Embodied AI Lab

- -
-
-
-
-
Work with us
-
At REAL, we are always happy to take in talented individuals as full-time students (for M.Sc./Ph.D. positions), or for short-term (intern/visitor) roles. If that's you, here is how to get started.
-
-
-
-
- -
- -
-

Full-time students (M.Sc./Ph.D.)

-
- -
- -

- If you are looking to join an M.Sc. or a Ph.D. program at Universite de Montreal or Mila, and would like to work at REAL, please apply through Mila admissions. Indicate either Liam Paull or Glen Berseth as some of your faculty advisors of choice. -

-
-
- - - -
- -
-

Interns

-
- -
-

- If you are looking to intern with REAL, please fill out our internship application form. -

-

- We are recruiting up to two interns for Winter 2021 (Jan-May). Please see this listing if interested -

-
-
- diff --git a/events.html b/events.html index 414faf6..6122494 100644 --- a/events.html +++ b/events.html @@ -1,9 +1,146 @@ ---- -layout: default -title: Events ---- + + + + + + + + + Robotics Group @ University of Montreal | Events + + + + + + + + -
+ + +
+ + + + + + + +
@@ -14,12 +151,1163 @@
- {% comment %} - Sort the events by date, putting those without dates last - {% endcomment %} - {% assign events_by_date = site.events | sort: 'date', 'first' %} - {% assign events_by_date = events_by_date | reverse %} - {% for p in events_by_date %} - {% include event-card.html event=p %} - {% endfor %} + + + + + + + + + + +
+
+
+ + + + + + Montreal Robotics Summer School + + +
+
+
+ +

+ + Montreal Robotics Summer School + + +

+ + +

Robotics is a rapidly growing field with interest from around the world. This summer school offers tutorials and lectures on state-of-the-art machine learning methods for training the next generation of learning robots. It is a joint effort supported by the many robotics groups around Montreal.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Workshop on Physical Reasoning and Inductive Biases for the Real World + + +
+
+
+ +

+ + Workshop on Physical Reasoning and Inductive Biases for the Real World + + +

+ + +

Workshop at NeurIPS 2021

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Workshop on the Ecological Theory of RL + + +
+
+
+ +

+ + Workshop on the Ecological Theory of RL + + +

+ + +

Workshop at NeurIPS 2021

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + The 6th AI Driving Olympics Competition + + +
+
+
+ +

+ + The 6th AI Driving Olympics Competition + + +

+ + +

The 6th iteration of the AI Driving Olympics, taking place virtually at NeurIPS 2021. The AI-DO serves to benchmark the state of the art of artificial intelligence in autonomous driving by providing standardized simulation and hardware environments for tasks related to multi-sensory perception and embodied AI.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + IROS 2021 Workshop on Evaluating the Broader Impacts of Self-Driving Cars + + +
+
+
+ +

+ + IROS 2021 Workshop on Evaluating the Broader Impacts of Self-Driving Cars + + +

+ + +

The primary objective of this workshop is to stimulate a conversation between roboticists, who focus on the development and implementation of autonomy algorithms, and regulators, economists, psychologists, and lawyers who are experts on the broader impacts that self-driving vehicles will have on society.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Winter 2021 Robot Learning Seminar Series + + +
+
+
+ +

+ + Winter 2021 Robot Learning Seminar Series + + +

+ + +

The Robotics and Embodied AI Lab and Mila are hosting the Winter 2021 edition of the robot learning seminar series: a set of virtual talks by researchers in this field. Speakers this session include Steven Waslander, Animesh Garg, Sylvia Herbert, Georgia Chalvatzaki, Deepak Pathak, Pulkit Agrawal, Lilian Weng, Kelsey Allen, Manolis Savva, and Jiajun Wu.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Summer 2020 Robot Learning Seminar Series + + +
+
+
+ +

+ + Summer 2020 Robot Learning Seminar Series + + +

+ + +

The Robotics and Embodied AI Lab and Mila are hosting the Summer 2020 edition of the robot learning seminar series: a set of virtual talks by researchers in this field. Speakers in this inaugural session include Stefani Tellex, Rika Antonova, Gunshi Gupta, Igor Gilitschenski, and Bhairav Mehta.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + IROS 2020 Workshop on Benchmarking Progress in Autonomous Driving + + +
+
+
+ +

+ + IROS 2020 Workshop on Benchmarking Progress in Autonomous Driving + + +

+ + +

Autonomous driving has seen incredible progress of late. Recent workshops at top conferences in robotics, computer vision, and machine learning have primarily showcased the technological advancements in the field. This workshop provides a platform to investigate and discuss the methods by which progress in autonomous driving is evaluated, benchmarked, and verified.

+

+ + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Fall 2020 Robot Learning Seminar Series + + +
+
+
+ +

+ + Fall 2020 Robot Learning Seminar Series + + +

+ + +

The Robotics and Embodied AI Lab and Mila are hosting the Fall 2020 edition of the robot learning seminar series: a set of virtual talks by researchers in the field. Speakers this session include Florian Shkurti, Valentin Peretroukhin, Ankur Handa, Shubham Tulsiani, Ronald Clark, Lerrel Pinto, Mustafa Mukadam, Shuran Song, and Angela Schoellig.

+

+ + + + + +
+ + + +
+
+
+
+ + + + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/events/aido-6.html b/events/aido-6.html new file mode 100644 index 0000000..0c25288 --- /dev/null +++ b/events/aido-6.html @@ -0,0 +1,5 @@ +

The AI Driving Olympics 6

+ +

Duckietown traditionally hosts AI-DO competitions biannually, with finals events held at machine learning and robotics conferences such as the International Conference on Robotics and Automation (ICRA) and the Conference on Neural Information Processing Systems (NeurIPS).

+ +

AI-DO 6 will be held in conjunction with NeurIPS 2021 and will have three leagues: urban driving, advanced perception, and racing. The winter champions will be announced during NeurIPS 2021, on December 10, 2021!

diff --git a/events/ecorl.html b/events/ecorl.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/ecorl.html @@ -0,0 +1 @@ + diff --git a/events/iros2020.html b/events/iros2020.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/iros2020.html @@ -0,0 +1 @@ + diff --git a/events/iros2021.html b/events/iros2021.html new file mode 100644 index 0000000..fcef413 --- /dev/null +++ b/events/iros2021.html @@ -0,0 +1,11 @@ +

IROS 2021 Workshop on Evaluating the Broader Impacts of Self-Driving Cars

+ +

Self-driving cars have received significant attention in the last decade, and arguably have the potential to be the most impactful robotics application to date. The question usually asked by the public is “when are self-driving cars going to be here?” On the one hand, entrusting the entire driving problem to an autonomous agent seems frustratingly daunting. On the other hand, we have started to see real deployments of autonomous vehicles in limited capacities, so perhaps there is reason for hope.

+ +

Autonomous driving advancements are typically evaluated along well-defined, but potentially myopic performance criteria. These metrics are reasonable in the sense that they do give us some quantitative measure that we can use for comparison. However, the true potential impact of this technology reaches far beyond these relatively simplistic measures. In this workshop we will take a broader perspective with respect to evaluating the progress that we have made towards making self-driving a reality. In the process, we will focus particularly on aspects of the integration of this technology that are rarely covered in technical papers on the subject. Specifically, we will focus on the following three objectives:

+ +

The primary objective of this workshop is to stimulate a conversation between roboticists, who focus on the development and implementation of autonomy algorithms, and regulators, economists, psychologists, and lawyers who are experts on the broader impacts that self-driving vehicles will have on society. We feel that it is critical to foster a community of researchers and practitioners whose expertise extends beyond the algorithmic challenges of realizing self-driving vehicles. As roboticists, we are ill-equipped to understand the broad impacts of this technology in areas that include ethics, philosophy, psychology, regulations, legal policy, and risk, to name a few, and it is critical that technological development is guided by such impacts. We will achieve our objective by inviting speakers and panelists who are experts in these adjacent fields to stimulate a broader conversation around this technology. This objective would be considered achieved if participants take the new perspectives they were exposed to and consider them in their own specific field of interest. For roboticists, this means explicitly considering these broader issues in the development of their algorithms. A stretch goal would be to spawn research collaborations between roboticists and researchers from these adjacent fields.

diff --git a/events/learningseriesfall2020.html b/events/learningseriesfall2020.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/learningseriesfall2020.html @@ -0,0 +1 @@ + diff --git a/events/learningseriessummer2020.html b/events/learningseriessummer2020.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/learningseriessummer2020.html @@ -0,0 +1 @@ + diff --git a/events/learningserieswinter2021.html b/events/learningserieswinter2021.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/learningserieswinter2021.html @@ -0,0 +1 @@ + diff --git a/events/mrss.html b/events/mrss.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/mrss.html @@ -0,0 +1 @@ + diff --git a/events/physical-reasoning.html b/events/physical-reasoning.html new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/events/physical-reasoning.html @@ -0,0 +1 @@ + diff --git a/index.html b/index.html index 2c1384d..a2dc916 100644 --- a/index.html +++ b/index.html @@ -1,33 +1,458 @@ ---- -layout: default -title: Home -notitle: true - -# groups of columns of {roles: list, width: num, image: bool} -role-tables: -- - roles: [faculty, postdoc] - width: 4 - image: true - - roles: [phd, masters, developer] - width: 8 - image: true -- - roles: [intern, intern-alum, collab] - width: 4 - image: false -carousels: - - images: - - image: /img/slider/group1.jpg - - image: /img/slider/group2.jpg - - image: /img/slider/group3.jpg ---- - -
- {% include carousel.html height="50" unit="%" duration="4" number="1" %} + + + + + + + + + Robotics Group @ University of Montreal + + + + + + + + + + + +
+ + + + + + + +
+ + + + + + + +

Robotics and Embodied AI Lab (REAL)

The Robotics and Embodied AI Lab (REAL) is a research lab in DIRO at the Université de Montréal and is also affiliated with Mila. REAL is dedicated to making generalist robots and other embodied agents.

We are always on the lookout for talented students to join us as full-time students or visitors. To learn more, click the link below.

- Learn more + Learn more
@@ -38,18 +463,224 @@
News
- {% for post in site.posts limit: site.front_page_news %} - {% include news-item.html item=post %} - {% endfor %} - {% assign numposts = site.posts | size %} - {% if numposts >= 1 %} - + + + + + + + + + + +
+ + + + + + + + + + +
+ + + November 30, 2020 + + +
+ + We released gradslam - a differentiable dense SLAM framework for deep learning. Check it out! + + +
+ + +
+
+ + + + + + + + + + +
+ + + October 30, 2020 + + +
+ + We organized an IROS workshop on Benchmarking Progress in Autonomous Driving + + +
+ + +
+
+ + + + + + + + + + +
+ + + October 15, 2020 + + +
+ + Check out our new NeurIPS 2020 oral paper La-MAML: Look-Ahead Meta-Learning for Continual Learning [Code], [Short Video]. + + +
+ + +
+
+ + + + + + + + + + +
+ + + October 10, 2020 + + +
+ + Two papers accepted to NeurIPS 2020 (one of them an oral - top 1.1%). Congratulations, Gunshi and Ruixiang! + + +
+ + +
+
+ + + + + + + + + + +
+ + + September 10, 2020 + + +
+ + Robot learning seminar series launched! + + +
+ + +
+
+ + + + + + + + + + +
+ + + June 30, 2020 + + +
+ + Gunshi Gupta successfully completes her M.Sc. and joins Wayve as a deep learning researcher! + + +
+ + +
+
+ + + + + + + + + + +
+ + + June 05, 2020 + + +
+ + Our paper [MapLite: Autonomous intersection navigation without detailed prior maps] was named the best Robotics and Automation Letters (RAL) paper of 2019! Check it out here. And here’s a short video abstract. + + +
+ + +
+
+ + + + + More news … - {% endif %} +
@@ -60,25 +691,1981 @@
News
Projects
- {% comment %} - Sort the projects by date, putting those without dates last - {% endcomment %} - {% assign projects_by_date = site.projects | sort: 'last-updated', 'first' %} - {% assign projects_by_date = projects_by_date | reverse %} - {% for p in projects_by_date %} - {% include project-card.html project=p %} - {% endfor %} -

- - - - - All projects… - -

+ + + + + + + + + + +
+
+
+ + + + + + ConceptFusion: Open-set Multimodal 3D Mapping + + +
+
+
+ +

+ + ConceptFusion: Open-set Multimodal 3D Mapping + +

+ + +

ConceptFusion builds open-set 3D maps that can be queried via text, click, image, or audio. Given a series of RGB-D images, our system builds a 3D scene representation that is inherently multimodal by leveraging foundation models such as CLIP, and therefore doesn’t require any additional training or finetuning.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
-
- + + + + + + + +
+
+
+ + + + + + One-4-All - Neural Potential Fields for Embodied Navigation + + +
+
+
+ +

+ + One-4-All - Neural Potential Fields for Embodied Navigation + +

+ + +

An end-to-end fully parametric method for image-goal navigation that leverages self-supervised and manifold learning to replace a topological graph with a geodesic regressor. During navigation, the geodesic regressor is used as an attractor in a potential function defined over the latent space, allowing navigation to be framed as a minimization problem.

+

+ + + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + f-Cal - Calibrated aleatoric uncertainty estimation from neural networks for robot perception + + +
+
+
+ +

+ + f-Cal - Calibrated aleatoric uncertainty estimation from neural networks for robot perception + +

+ + +

f-Cal is a calibration method proposed to calibrate probabilistic regression networks. Typical Bayesian neural networks are shown to be overconfident in their predictions. To use these predictions for downstream tasks, reliable and calibrated uncertainty estimates are critical. f-Cal is a straightforward loss function that can be employed to train any probabilistic neural regressor and obtain calibrated uncertainty estimates.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Inverse Variance Reinforcement Learning + + +
+
+
+ +

+ + Inverse Variance Reinforcement Learning + +

+ + +

Improving sample efficiency in deep reinforcement learning by mitigating the impact of heteroscedastic noise in the bootstrapped target using uncertainty estimation.

+

+ + + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Lifelong Topological Visual Navigation + + +
+
+
+ +

+ + Lifelong Topological Visual Navigation + +

+ + +

A learning-based topological visual navigation method with graph update strategies that improves lifelong navigation performance over time.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Taskography - Evaluating robot task planning over large 3D scene graphs + + +
+
+
+ +

+ + Taskography - Evaluating robot task planning over large 3D scene graphs + +

+ + +

Taskography is the first large-scale robotic task planning benchmark over 3D scene graphs (3DSGs). While most benchmarking efforts in this area focus on vision-based planning, we systematically study symbolic planning to decouple planning performance from visual representation learning.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + gradsim + + +
+
+
+ +

+ + gradsim + +

+ + +

gradSim is a framework that overcomes the dependence on 3D supervision by leveraging differentiable multiphysics simulation and differentiable rendering to jointly model the evolution of scene dynamics and image formation.

+

+ + + + + Collaborators: +
    + + +
  • + + Miles Macklin + +
  • + + +
  • + + Vikram Voleti + +
  • + + +
  • + + Linda Petrini + +
  • + + +
  • + + Martin Weiss + +
  • + + +
  • + + Jerome Parent-Levesque + +
  • + + +
  • + + Kevin Xie + +
  • + + +
  • + + Kenny Erleben + +
  • + + +
  • + + + Florian Shkurti + + +
  • + + +
  • + + Derek Nowrouzerzahrai + +
  • + + +
  • + + Sanja Fidler + +
  • + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + gradslam + + +
+
+
+ +

+ + gradslam + +

+ + +

gradslam is an open-source framework providing differentiable building blocks for simultaneous localization and mapping (SLAM) systems. We enable the usage of dense SLAM subsystems from the comfort of PyTorch.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + La-MAML + + +
+
+
+ +

+ + La-MAML + +

+ + +

Look-ahead meta-learning for continual learning

+

+ + + + + Collaborators: +
    + + +
  • + + Karmesh Yadav + +
  • + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Active Domain Randomization + + +
+
+
+ +

+ + Active Domain Randomization + +

+ + +

Making sim-to-real transfer more efficient

+

+ + + + + Collaborators: +
    + + +
  • + + Chris Pal + +
  • + +
+ + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Self-supervised visual odometry estimation + + +
+
+
+ +

+ + Self-supervised visual odometry estimation + +

+ + +

A self-supervised deep network for visual odometry estimation from monocular imagery.

+

+ + + + + Collaborators: + + + + +
+ + + +
+
+
+
+ + + + + + + + +
+
+
+ + + + + + Deep Active Localization + + +
+
+
+ +

+ + Deep Active Localization + +

+ + +

Learned active localization, implemented on “real” robots.

+

+ + + + + Collaborators: +
    + + +
  • + + Keehong Seo + +
  • + +
+ + + +
+ + + +
+
+
+
+ + +

+ + + + + All projects… + +

+
+ +
+ + + + + +
+

+ + + + | + + + + | + + + + + +

+
+
+ + + + + + + + diff --git a/interns-winter-2021.html b/interns-winter-2021.html new file mode 100644 index 0000000..a0b66e6 --- /dev/null +++ b/interns-winter-2021.html @@ -0,0 +1,198 @@ + + + + + + + + + Robotics Group @ University of Montreal + + + + + + + + + + + +
+ + + + + + + +

Up to two internships are available at the Robotics and Embodied AI Lab (REAL) at Université de Montréal and Mila. Internship roles are full-time (40 hrs/week) with the possibility of remote work. Preferred duration: Jan - May 2021.

+ +

Note: Students at both undergraduate and graduate levels are welcome to apply.

+ +

Selected interns will leverage end-to-end differentiable SLAM frameworks (such as gradslam) to implement deep learning solutions for 3D perception, navigation, and manipulation. They will have the opportunity to collaborate with the vibrant research community at Mila and publish at leading robotics/vision/ML venues. Candidates will be compensated competitively with graduate student salaries in the Montreal area.

+ +

Requirements:

+
    +
  • Must be available for full-time roles for winter 2021 (Jan-May 2021)
  • +
  • Experience in one or more of the following: 3D reconstruction/mapping, deep learning for 3D perception, deep reinforcement learning, SLAM
  • +
  • Strong programming skills (prior PyTorch experience required)
  • +
+ +

Desired:

+
    +
  • Experience with one of the following: differentiable computer vision (see Kornia), differentiable rendering, or designing and implementing RL environments and agents
  • +
  • Prior publication record at robotics/vision/ML venues
  • +
  • Prior open-source project contributions/management
  • +
+ +

How to apply: Please fill out this form.

+ +

In case of questions, feel free to write to Krishna.

+ +

We will continue to process applications on a first-come-first-served basis until the positions are filled.

+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/people.html b/people.html index 1d083a2..00d2a4e 100644 --- a/people.html +++ b/people.html @@ -1,25 +1,146 @@ ---- -layout: default -title: People - -# groups of columns of {roles: list, width: num, image: bool} -member-tables: -- - roles: [faculty] - width: 4 - image: true - - roles: [phd, masters, intern, developer] - width: 4 - image: true + + + + + + + + + Robotics Group @ University of Montreal | People + + + + + + + + + + +
+ + -alumni-tables: -- - roles: [postdoc-alum, phd-alum, masters-alum, intern-alum] - width: 4 - image: false + ---- + -
+
@@ -32,41 +153,4953 @@
- {% for role-table in page.member-tables %} - {% for role-column in role-table %} - {% for role in role-column.roles %} - {% include role-people.html role=role image=role-column.image %} -
- {% endfor %} - {% endfor %} - {% endfor %} -
-
-
+ + + + -
+ + +
-
-
-
Alumni
+
+
+
Faculty
+
+
+ + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Glen Berseth + + + + + + +

    + Glen Berseth +


+
+ + + +
Interests: Reinforcement learning, robotics, machine learning, generalization, planning + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
-
-
+
+ + + + + + + + + + + + + + + + + + + + + + -
-
-
- {% for role-table in page.alumni-tables %} - {% for role-column in role-table %} - {% for role in role-column.roles %} - {% include role-people.html role=role image=role-column.image %}
- {% endfor %} - {% endfor %} + + + + + + + + + + + + + + + + + + +
+
+
+
+
PhD Students
+
+
+ + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Kaustubh Mani + + + K + + + + + + + +

    Kaustubh Mani


+
+ +
Advisor: Liam Paull     + + + +
Interests: Safety, Robustness and Model-Based RL + + + +
- {% endfor %}
-
-
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Manfred Diaz + + + + + + +

    + Manfred Diaz +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Luke Rowe + + + + + +

    Luke Rowe


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Chris Pal     + + +
Interests: Prediction, planning, and simulation for autonomous driving + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Ruixiang Zhang + + + + + +

    Ruixiang Zhang


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Yoshua Bengio     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Zhen Liu + + + + + +

    Zhen Liu


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Yoshua Bengio     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Mostafa Elaraby + + + + + +

    Mostafa Elaraby


+
+ +
Advisor: Liam Paull     + + + +
Interests: Continual learning, imitation learning + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Sacha Morin + + + + + + +

    + Sacha Morin +


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Guy Wolf     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Charlie Gauthier + + + + + +

    Charlie Gauthier


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Glen Berseth     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Miguel Saavedra-Ruiz + + + + + + +

    + Miguel Saavedra-Ruiz +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Albert Zhan + + + + + + +

    + Albert Zhan +


+
+ +
Advisor: Glen Berseth     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Siddarth Venkatraman + + + + + +

    Siddarth Venkatraman


+
+ +
Advisor: Glen Berseth     + + + +
Interests: Reinforcement learning, representation learning + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + + + + + + + +
+ + + + + + + + + + + + + + +
+
+
+
+
Master's Students
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Roger Creus Castanyer + + + + + + +

    + Roger Creus Castanyer +


+
+ +
Advisor: Glen Berseth     + + + +
Interests: (Unsupervised) (Deep) reinforcement learning + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Ali Kuwajerwala + + + + + + +

    + Ali Kuwajerwala +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Simon Demeule + + + + + + +

    + Simon Demeule +


+
+ +
Advisor: Glen Berseth     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Adriana Hugessen + + + + + +

    Adriana Hugessen


+
+ +
Advisor: Glen Berseth     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + +
+
+
+
+
Undergraduate Researchers and Interns
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Raj Ghugare + + + + + + +

    + Raj Ghugare +


+
+ + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + + + + + +
+ + + + + + + + + + +
+
+
+
+
Software Developers
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Kirsty Ellis + + + + + +

    Kirsty Ellis


+
+ + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + + + + + + + + + + + + + +
+ + + +
+
+ + + +
+
+
+
+
Alumni
+
+
+
+
+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
Past Postdocs
+
+
+ + + + + + + + + + + + + + + + + +
+
+ + +

    + Florian Golemo +


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Chris Pal     + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Ali Harakeh +


+
+ +
Advisor: Liam Paull     + + + +
Interests: Bayesian deep learning, conformal prediction, out-of-distribution generalization, and continual learning + + + +
Current Position: Senior Applied Research Scientist at Mila + +
+ +
+
+ + + + + + + + + + + + + + + + + + +
+
+ + +

    + Steven Parkison +


+
+ +
Advisor: Liam Paull     + + + +
Interests: SLAM, optimization, robotic perception, and lukewarm coffee + + + +
Current Position: Robotics researcher at the Institut de Recherche d'Hydro Québec (IREQ) + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
Past PhD Students
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    Vincent Mai


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: AI researcher at the Institut de Recherche d'Hydro Québec (IREQ) + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
Past Master's Students
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    + Dishank Bansal +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Calibrated Uncertainty Estimation for SLAM + + +
Current Position: Research Engineer at Meta + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Charlie Gauthier


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Fear prediction for training robust RL agents + + +
Current Position: PhD Student at Mila / REAL + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    + Miguel Saavedra-Ruiz +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Leveraging Self-Supervision for Visual Embodied Navigation with Neuralized Potential Fields + + +
Current Position: PhD Student at Mila / REAL + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    + Sai Krishna G.V. +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Deep active localization + + +
Current Position: Reinforcement learning researcher at AI-Redefined + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Gunshi Gupta


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Look-ahead meta-learning for continual learning + + +
Current Position: PhD student at Oxford + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Nithin Vasisth


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Lifelong learning of concepts in CRAFT + + +
Current Position: Senior project scientist at Robert Bosch Center for Cyber-Physical Systems @ IISc + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Breandan Considine


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Michalis Famelis     + + + +
Thesis: Programming tools for intelligent systems + + +
Current Position: PhD student at McGill + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Bhairav Mehta


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: On learning and generalization in unstructured task spaces + + +
Current Position: CEO at Innabox + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Anthony Courchesne +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: On quantifying the value of simulation for training and evaluating robotic agents + + +
Current Position: Project manager at Institut du Vehicule Innovant (IVI) + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Dhaivat Bhatt


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Variational aleatoric uncertainty calibration in neural regression + + +
Current Position: Research engineer at Samsung + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Rey Reza Wiyatno +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Lifelong Topological Visual Navigation + + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
Past Undergraduate Researchers and Interns
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    Aurélien Bück-Kaeffer


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Atharva Chandak


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Bipasha Sen


+
+ + + + + +
Current Position: PhD student at MIT + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Aditya Agarwal


+
+ + + + + +
Current Position: PhD student at Brown + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ + +

    Kaustubh Mani


+
+ + + + + +
Current Position: PhD student at Mila / REAL + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Nikhil Varma Keetha


+
+ + + + + +
Current Position: PhD student at CMU + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Sai Sree Harsha


+
+ + + + + +
Current Position: Master's student at UCSD + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Abhishek Jain


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Sharath Chandra Raparthy


+
+ + + + + +
Current Position: PhD student at Mila with Irina Rish + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Mark Van der Merwe +


+
+ + + + + +
Current Position: PhD student at the University of Michigan + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Amrut Sarangi


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Rohan Raj


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    + Waleed Khamies +


+
+ + + + + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Zihan Wang


+
+ + + + + +
Current Position: Master's student at Stanford + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Homanga Bharadhwaj


+
+ + + + + +
Current Position: PhD student at the University of Toronto + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Adam Sigal


+
+ + + + + +
Current Position: PhD student at McGill + +
+ +
+
+ + + + + + + + + + + + + + + +
+
+ + +

    Sarthak Sharma


+
+ + + + + +
Current Position: AI/ML Engineer at Verisk AI Lab + +
+ +
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + + +
+ + + + +
+
+
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ + + + + + + + + + + diff --git a/projects/01-gradslam.html b/projects/01-gradslam.html new file mode 100644 index 0000000..c643bf5 --- /dev/null +++ b/projects/01-gradslam.html @@ -0,0 +1,305 @@ + + + + + + + + + Robotics Group @ University of Montreal | gradslam + + + + + + + + + + + +
+ + + + + gradslam + + + + +

gradslam

+ +

gradslam is an open-source framework providing differentiable building blocks for simultaneous localization and mapping (SLAM) systems. We enable the usage of dense SLAM subsystems from the comfort of PyTorch.

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Krishna Murthy Jatavallabhula + + + + + + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/adr.html b/projects/adr.html new file mode 100644 index 0000000..3942305 --- /dev/null +++ b/projects/adr.html @@ -0,0 +1,420 @@ + + + + + + + + + Robotics Group @ University of Montreal | Active Domain Randomization + + + + + + + + + + + +
+ + + + + Active Domain Randomization + + + + +

Active Domain Randomization

+ +

Domain randomization is a popular technique for improving domain transfer, often used in a zero-shot setting when the target domain is unknown or cannot easily be used for training. In this work, we empirically examine the effects of domain randomization on agent generalization. Our experiments show that domain randomization may lead to suboptimal, high-variance policies, which we attribute to the uniform sampling of environment parameters. We propose Active Domain Randomization, a novel algorithm that learns a parameter sampling strategy. Our method looks for the most informative environment variations within the given randomization ranges by leveraging the discrepancies of policy rollouts in randomized and reference environment instances. We find that training more frequently on these instances leads to better overall agent generalization. Our experiments across various physics-based simulated and real-robot tasks show that this enhancement leads to more robust, consistent policies.

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Bhairav Mehta + + + + + +

    Bhairav Mehta


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: On learning and generalization in unstructured task spaces + + +
Current Position: CEO at Innabox + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Florian Golemo + + + + + + +

    + Florian Golemo +


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Chris Pal     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Manfred Diaz + + + + + + +

    + Manfred Diaz +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/conceptfusion.html b/projects/conceptfusion.html new file mode 100644 index 0000000..80fe844 --- /dev/null +++ b/projects/conceptfusion.html @@ -0,0 +1,305 @@ + + + + + + + + + Robotics Group @ University of Montreal | ConceptFusion: Open-set Multimodal 3D Mapping + + + + + + + + + + + +
+ + + + + ConceptFusion: Open-set Multimodal 3D Mapping + + + + +

ConceptFusion: Open-set Multimodal 3D Mapping

+ +

Building 3D maps of the environment is central to robot navigation, planning, and interaction with objects in a scene. Most existing approaches that integrate semantic concepts with 3D maps largely remain confined to the closed-set setting: they can only reason about a finite set of concepts, pre-defined at training time. Further, these maps can only be queried using class labels, or in recent work, using text prompts.

+ +

We address both these issues with ConceptFusion, a scene representation that is: (i) fundamentally open-set, enabling reasoning beyond a closed set of concepts, and (ii) inherently multimodal, enabling a diverse range of possible queries to the 3D map, from language, to images, to audio, to 3D geometry, all working in concert. ConceptFusion leverages the open-set capabilities of today’s foundation models pre-trained on internet-scale data to reason about concepts across modalities such as natural language, images, and audio. We demonstrate that pixel-aligned open-set features can be fused into 3D maps via traditional SLAM and multi-view fusion approaches. This enables effective zero-shot spatial reasoning, not needing any additional training or finetuning, and retains long-tailed concepts better than supervised approaches, outperforming them by a margin of more than 40% on 3D IoU. We extensively evaluate ConceptFusion on a number of real-world datasets, simulated home environments, a real-world tabletop manipulation task, and an autonomous driving platform. We showcase new avenues for blending foundation models with 3D open-set multimodal mapping.

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Ali Kuwajerwala + + + + + + +

    + Ali Kuwajerwala +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/ctcnet.html b/projects/ctcnet.html new file mode 100644 index 0000000..6480740 --- /dev/null +++ b/projects/ctcnet.html @@ -0,0 +1,360 @@ + + + + + + + + + Robotics Group @ University of Montreal | Self-supervised visual odometry estimation + + + + + + + + + + + +
+ + + + + Self-supervised visual odometry estimation + + + + + + + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Krishna Murthy Jatavallabhula + + + + + + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Gunshi Gupta + + + + + +

    Gunshi Gupta


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Look-ahead meta-learning for continual learning + + +
Current Position: PhD student at Oxford + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/dal.html b/projects/dal.html new file mode 100644 index 0000000..278bcb0 --- /dev/null +++ b/projects/dal.html @@ -0,0 +1,479 @@ + + + + + + + + + Robotics Group @ University of Montreal | Deep Active Localization + + + + + + + + + + + +
+ + + + + Deep Active Localization + + + + +

Deep Active Localization

+ +

Active localization is the problem of generating robot actions that allow the robot to maximally disambiguate its pose within a reference map. Traditional approaches to this use an information-theoretic criterion for action selection and hand-crafted perceptual models. In this work we propose an end-to-end differentiable method for learning to take informative actions that is trainable entirely in simulation and then transferable to real robot hardware with zero refinement. The system is composed of two modules: a convolutional neural network for perception, and a deep reinforcement learned planning module. We introduce a multi-scale approach to the learned perceptual model since the accuracy needed to perform action selection with reinforcement learning is much less than the accuracy needed for robot control. We demonstrate that the resulting system outperforms systems that use the traditional approach for either perception or planning. We also demonstrate our approach's robustness to different map configurations and other nuisance parameters through the use of domain randomization in training. The code is compatible with the OpenAI Gym framework as well as the Gazebo simulator.

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Sai Krishna G.V. + + + + + + +

    + Sai Krishna G.V. +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Deep active localization + + +
Current Position: Reinforcement learning researcher at AI-Redefined + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Dhaivat Bhatt + + + + + +

    Dhaivat Bhatt


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Variational aleatoric uncertainty calibration in neural regression + + +
Current Position: Research engineer at Samsung + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Krishna Murthy Jatavallabhula + + + + + + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Vincent Mai + + + + + +

    Vincent Mai


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: AI researcher at the Institut de Recherche d'Hydro Québec (IREQ) + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/fcal.html b/projects/fcal.html new file mode 100644 index 0000000..1a6f3f7 --- /dev/null +++ b/projects/fcal.html @@ -0,0 +1,483 @@ + + + + + + + + + Robotics Group @ University of Montreal | f-Cal - Calibrated aleatoric uncertainty estimation from neural networks for robot perception + + + + + + + + + + + +
+ + + + + f-Cal - Calibrated aleatoric uncertainty estimation from neural networks for robot perception + + + + +

f-Cal

+ + + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Dhaivat Bhatt + + + + + +

    Dhaivat Bhatt


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Variational aleatoric uncertainty calibration in neural regression + + +
Current Position: Research engineer at Samsung + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Kaustubh Mani + + + K + + + + + + + +

    Kaustubh Mani


+
+ + + + + +
Current Position: PhD student at Mila / REAL + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Dishank Bansal + + + + + + +

    + Dishank Bansal +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Calibrated Uncertainty Estimation for SLAM + + +
Current Position: Research Engineer at Meta + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Krishna Murthy Jatavallabhula + + + + + + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/gradsim.html b/projects/gradsim.html new file mode 100644 index 0000000..8d06e32 --- /dev/null +++ b/projects/gradsim.html @@ -0,0 +1,423 @@ + + + + + + + + + Robotics Group @ University of Montreal | gradsim + + + + + + + + + + + +
+ + + + + gradsim + + + + +

gradsim

+ + + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Krishna Murthy Jatavallabhula + + + + + + +

    + Krishna Murthy Jatavallabhula +


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Florian Golemo + + + + + + +

    + Florian Golemo +


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Chris Pal     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Breandan Considine + + + + + +

    Breandan Considine


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Michalis Famelis     + + + +
Thesis: Programming tools for intelligent systems + + +
Current Position: PhD student at McGill + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/ivrl.html b/projects/ivrl.html new file mode 100644 index 0000000..da0e21e --- /dev/null +++ b/projects/ivrl.html @@ -0,0 +1,444 @@ + + + + + + + + + Robotics Group @ University of Montreal | Inverse Variance Reinforcement Learning + + + + + + + + + + + +
+ + + + + Inverse Variance Reinforcement Learning + + + + +

Inverse Variance Reinforcement Learning

+ +

Most robotics problems can be written as (Partially Observable) Markov Decision Processes (MDPs), with discrete or continuous observation and action spaces. Deep Reinforcement Learning (DRL) is a powerful tool to find an optimal policy for these processes, based on experience acquired during the training process. The training of a DRL agent requires many trajectories, which can be arduous and expensive to produce in the real world. Indeed, the real world is not parallelizable, may require human efforts to reset, and comes with risks for the robot and the environment. Gathering sufficient experience is therefore one of the most important challenges when applying DRL to robotics. The objective of this project is to reduce the number of samples necessary to train a DRL agent on a robot.

+ +

A diagram representing the generation process of the noisy target.

+ +

DRL algorithms are complex processes. An important part of most model-free algorithms is learning the value function of a given state or state-action pair, i.e., the expected return given the current policy. To do so, deep supervised learning components are used, where the input is the state(-action) and the label is called the target. The target T is a noisy sample of the value. Often, it is computed using the reward r and the next state s’ sampled from experience, the next action a’ based on s’ and the current policy, and the value Q of the next state-action pair, which is bootstrapped from the current value estimator (this is the temporal-difference target). The noise on the target negatively impacts the learning process: the networks learn from incorrect data, which entails slower learning and instability.

+ +

The key element in this project is the fact that the noise affecting the target, i.e., its difference from the true and unique value function, is heteroscedastic. This means that the distribution it is sampled from changes for each input and training step. Sometimes, this distribution has a very low variance: the target is close to the value. Sometimes, on the other hand, the target is subject to a lot of noise and does not contain useful information with respect to the value. Therefore, the value estimation task in DRL is a case of heteroscedastic regression.

+ +

Projects

+ +

Batch Inverse-Variance Weighting for Deep Heteroscedastic Regression

+ +

Noisy labels slow the learning process in regression: the first part of this project was to prove that the effect of noisy labels can be mitigated under the hypothesis that we know the variance of the noise distribution of each label. How can we include this additional information for heteroscedastic regression? Intuitively, we should give more weight to the labels we trust more. In linear regression, the Gauss-Markov theorem shows that the optimal solution is to weigh each sample by the inverse of the variance of the label noise. We show that adapting inverse-variance weighting to gradient-based optimization methods significantly improves the performance of the learning process. Our paper, Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression (BIV), was presented at the Uncertainty and Robustness in Deep Learning workshop at ICML 2021.

+ +
+ A plot showing learning curves, where BIV is doing better than L2 and some baselines. +
BIV improves the learning performance with noisy labels compared to the L2 loss. Source: Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression
+
+ +

Inverse-Variance Reinforcement Learning

+ +

See project page: https://montrealrobotics.ca/ivrl/

+ +

The second part of the project was to use this weighting scheme in a DRL setting. Here, the challenge was to estimate the uncertainty of the target. A systematic analysis of the sources of uncertainty in the target generation process justifies the use of deep variance ensembles. These are used to estimate the variance due to the stochasticity of the environment and the policy, as well as the predictive uncertainty of the value prediction used to bootstrap the target. As the variance output by these deep ensembles is also the result of a training process, the uncertainty estimation is subject to complex dynamics. We show that the BIV weighting scheme is robust to changes of scale in the variance estimation, and that combining BIV with deep variance ensembles in DRL algorithms such as DQN and SAC leads to significant improvements in sample efficiency. This framework, called Inverse-Variance Reinforcement Learning (IV-RL), is presented in our Sample Efficient Deep Reinforcement Learning via Uncertainty Estimation submission to ICLR 2022.

+ +
+ A plot showing learning curves, where IV-SAC is doing better than DQN and other ensemble baselines. +
IV-RL on SAC improves the learning performance and the sample efficiency compared to other ensemble-based baselines. Source: Sample Efficient Deep Reinforcement Learning via Uncertainty Estimation
+
+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Vincent Mai + + + + + +

    Vincent Mai


+
+ +
Advisor: Liam Paull     + + + + + +
Current Position: AI researcher at the Institut de Recherche d'Hydro Québec (IREQ) + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Kaustubh Mani + + + K + + + + + + + +

    Kaustubh Mani


+
+ + + + + +
Current Position: PhD student at Mila / REAL + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Waleed Khamies + + + + + + +

    + Waleed Khamies +


+
+ + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/lamaml.html b/projects/lamaml.html new file mode 100644 index 0000000..1b3441d --- /dev/null +++ b/projects/lamaml.html @@ -0,0 +1,301 @@ + + + + + + + + + Robotics Group @ University of Montreal | La-MAML + + + + + + + + + + + +
+ + + + + La-MAML + + + + +

La-MAML

+ + + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + Gunshi Gupta + + + + + +

    Gunshi Gupta


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Look-ahead meta-learning for continual learning + + +
Current Position: PhD student at Oxford + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/ltvn.html b/projects/ltvn.html new file mode 100644 index 0000000..1608243 --- /dev/null +++ b/projects/ltvn.html @@ -0,0 +1,305 @@ + + + + + + + + + Robotics Group @ University of Montreal | Lifelong Topological Visual Navigation + + + + + + + + + + + +
+ + + + + Lifelong Topological Visual Navigation + + + + +

Lifelong Topological Visual Navigation

+ +

See project page: https://montrealrobotics.ca/ltvn/

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Rey Reza Wiyatno + + + + + + +

    + Rey Reza Wiyatno +


+
+ +
Advisor: Liam Paull     + + + + +
Thesis: Lifelong Topological Visual Navigation + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/o4a.html b/projects/o4a.html new file mode 100644 index 0000000..b6b1268 --- /dev/null +++ b/projects/o4a.html @@ -0,0 +1,363 @@ + + + + + + + + + Robotics Group @ University of Montreal | One-4-All - Neural Potential Fields for Embodied Navigation + + + + + + + + + + + +
+ + + + + One-4-All - Neural Potential Fields for Embodied Navigation + + + + +

One-4-All: Neural Potential Fields for Embodied Navigation

+ +

A fundamental task in robotics is to navigate between two locations. In particular, real-world navigation can require long-horizon planning using high-dimensional RGB images, which poses a substantial challenge for end-to-end learning-based approaches. Current semi-parametric methods instead achieve long-horizon navigation by combining learned modules with a topological memory of the environment, often represented as a graph over previously collected images. However, using these graphs in practice typically involves tuning a number of pruning heuristics to avoid spurious edges, limit runtime memory usage and allow reasonably fast graph queries. In this work, we present One-4-All (O4A), a method leveraging self-supervised and manifold learning to obtain a graph-free, end-to-end navigation pipeline in which the goal is specified as an image. Navigation is achieved by greedily minimizing a potential function defined continuously over the O4A latent space. Our system is trained offline on non-expert exploration sequences of RGB data and controls, and does not require any depth or pose measurements. We show that O4A can reach long-range goals in 8 simulated Gibson indoor environments, and further demonstrate successful real-world navigation using a Jackal UGV platform.

+ + +

People

+
+ + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Sacha Morin + + + + + + +

    + Sacha Morin +


+
+ +
Advisor: Liam Paull     + + +
Coadvisor: Guy Wolf     + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Miguel Saavedra-Ruiz + + + + + + +

    + Miguel Saavedra-Ruiz +


+
+ +
Advisor: Liam Paull     + + + + + +
+ +
+ + +
+
+ + + + + + + + + + + + + +
+
+ + + +
+ + + + + + + + + + + Liam Paull + + + + + + +

    + Liam Paull +


+
+ + + +
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking + + + +
+ +
+ + +
+
+ + + + +
+ + +
+

+ + + + | + + + + | + + + + + +

+
+ +
+ + + + + + + + + diff --git a/projects/taskography.html b/projects/taskography.html new file mode 100644 index 0000000..d599369 --- /dev/null +++ b/projects/taskography.html @@ -0,0 +1,304 @@ + + + + + + + + + Robotics Group @ University of Montreal | Taskography - Evaluating robot task planning over large 3D scene graphs + + + + + + + + + + + +
+ + + + + Taskography - Evaluating robot task planning over large 3D scene graphs + + + + +

Taskography

+ + + +

People

Krishna Murthy Jatavallabhula
Advisor: Liam Paull
Current Position: PostDoc at MIT with Antonio Torralba and Joshua Tenenbaum
Liam Paull
Interests: Robot perception, uncertainty, sim2real, and robot benchmarking
diff --git a/publications/index.html b/publications/index.html
new file mode 100644
index 0000000..a86545e
--- /dev/null
+++ b/publications/index.html
@@ -0,0 +1,7381 @@
Robotics Group @ University of Montreal | Publications
Publications
2023
1. ConceptFusion: Open-set Multimodal 3D Mapping. Krishna Murthy Jatavallabhula, Alihusein Kuwajerwala, Qiao Gu, Mohd Omama, Tao Chen, Shuang Li, Ganesh Iyer, Soroush Saryazdi, Nikhil Keetha, Ayush Tewari, Joshua B. Tenenbaum, Celso Miguel de Melo, Madhava Krishna, Liam Paull, Florian Shkurti, and Antonio Torralba. Robotics: Science and Systems (RSS), 2023. [arXiv | Project Page | Video]
2022
1. Hierarchical Reinforcement Learning for Precise Soccer Shooting Skills using a Quadrupedal Robot. Yandong Ji, Zhongyu Li*, Yinan Sun, Xue Bin Peng, Sergey Levine, Glen Berseth, and Koushil Sreenath. In Proc. IEEE/RSJ Intl Conf on Intelligent Robots and Systems (IROS 2022), 2022. [arXiv | Project Page | Video]

2. AnyMorph: Learning Transferable Policies By Inferring Agent Morphology. Brandon Trabucco, Mariano Phielipp, and Glen Berseth. International Conference on Machine Learning, 2022. [arXiv | Project Page]

3. Lifelong Topological Visual Navigation. Rey Reza Wiyatno, Anqi Xu, and Liam Paull. IEEE Robotics and Automation Letters, 2022. [arXiv | Project Page]

4. Sample efficient deep reinforcement learning via uncertainty estimation. Vincent Mai, Kaustubh Mani, and Liam Paull. International Conference on Learning Representations (ICLR), 2022. [arXiv]

5. Monocular Robot Navigation with Self-Supervised Pretrained Vision Transformers. Miguel Saavedra-Ruiz, Sacha Morin, and Liam Paull. 2022. [arXiv]

6. Generalization Games for Reinforcement Learning. Manfred Diaz, Charlie Gauthier, Glen Berseth, and Liam Paull. In ICLR 2022 Workshop on Gamification and Multiagent Solutions, 2022. [OpenReview]

7. f-Cal: Aleatoric uncertainty quantification for robot perception via calibrated neural regression. Dhaivat Bhatt, Kaustubh Mani, Dishank Bansal, Krishna Murthy, Hanju Lee, and Liam Paull. In 2022 International Conference on Robotics and Automation (ICRA), 2022. [PDF | Project Page]
2021
1. Iterative teaching by label synthesis. Weiyang Liu, Zhen Liu, Hanchen Wang, Liam Paull, Bernhard Schölkopf, and Adrian Weller. Advances in Neural Information Processing Systems (NeurIPS), 2021. [OpenReview]

2. LOCO: Adaptive exploration in reinforcement learning via local estimation of contraction coefficients. Manfred Diaz and Pablo Samuel Castro. In Self-Supervision for Reinforcement Learning Workshop - ICLR 2021, 2021. [OpenReview | PDF]

3. Orthogonal over-parameterized training. Weiyang Liu, Rongmei Lin, Zhen Liu, James M Rehg, Liam Paull, Li Xiong, Le Song, and Adrian Weller. In Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2021. [arXiv]

4. Uncertainty-Aware Policy Sampling and Mixing for Safe Interactive Imitation Learning. Manfred Diaz, Thomas Fevens, and Liam Paull. In 2021 18th Conference on Robots and Vision (CRV), 2021. [PDF]

5. Deep Koopman Representation for Control over Images (DKRCI). Philippe Laferrière, Samuel Laferrière, Steven Dahdah, James Richard Forbes, and Liam Paull. In 2021 18th Conference on Robots and Vision (CRV), 2021. [PDF]

6. Taskography: Evaluating robot task planning over large 3D scene graphs. Christopher Agia, Krishna Murthy Jatavallabhula, Mohamed Khodeir, Ondrej Miksik, Vibhav Vineet, Mustafa Mukadam, Liam Paull, and Florian Shkurti. In Conference on Robot Learning, 2021. [OpenReview | PDF | Project Page]

7. On Assessing the Usefulness of Proxy Domains for Developing and Evaluating Embodied Agents. Anthony Courchesne, Andrea Censi, and Liam Paull. In 2021 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), 2021. [arXiv]

8. Batch Inverse-Variance Weighting: Deep Heteroscedastic Regression. Vincent Mai, Waleed Khamies, and Liam Paull. In ICML Workshop on Uncertainty & Robustness in Deep Learning, 2021. [arXiv]
2020
1. gradSim: Differentiable simulation for system identification and visuomotor control. J Krishna Murthy, Miles Macklin, Florian Golemo, Vikram Voleti, Linda Petrini, Martin Weiss, Breandan Considine, Jérôme Parent-Lévesque, Kevin Xie, Kenny Erleben, and others. In International Conference on Learning Representations, 2020. [arXiv | Project Page]

2. La-MAML: Look-ahead Meta Learning for Continual Learning. Gunshi Gupta, Karmesh Yadav, and Liam Paull. In Neural Information Processing Systems (NeurIPS), 2020. Oral (top 1.1%). [arXiv | Project Page]

3. Your GAN is Secretly an Energy-based Model and You Should Use Discriminator Driven Latent Sampling. Tong Che, Ruixiang Zhang, Jascha Sohl-Dickstein, Hugo Larochelle, Liam Paull, Yuan Cao, and Yoshua Bengio. In Neural Information Processing Systems (NeurIPS), 2020. [arXiv]

4. Curriculum in Gradient-Based Meta-Reinforcement Learning. Bhairav Mehta, Tristan Deleu, Sharath Chandra Raparthy, Christopher Pal, and Liam Paull. In BETR-RL Workshop, 2020. [arXiv]

5. Generating Automatic Curricula via Self-Supervised Active Domain Randomization. Sharath Chandra Raparthy, Bhairav Mehta, and Liam Paull. In BETR-RL Workshop, 2020. [arXiv | Code]

6. The AI Driving Olympics at NeurIPS 2018. Julian Zilly, Jacopo Tani, Breandan Considine, Bhairav Mehta, Andrea F Daniele, Manfred Diaz, Gianmarco Bernasconi, Jan Ruch, Florian Golemo, A Kirsten Bowser, Matthew R Walter, Ruslan Hristov, Sunil Mallya, Emilio Frazzoli, Andrea Censi, and Liam Paull. In Springer, 2020. [arXiv]

7. Probabilistic Object Detection: Strengths, Weaknesses, and Opportunities. Dhaivat Bhatt, Dishank Bansal, Gunshi Gupta, Krishna Murthy Jatavallabhula, Hanju Lee, and Liam Paull. In ICML workshop on AI for autonomous driving, 2020. [Project Page]

8. MapLite: Autonomous Intersection Navigation without a Detailed Prior Map. Teddy Ort, Krishna Murthy, Rohan Banerjee, Sai Krishna Gottipati, Dhaivat Bhatt, Igor Gilitschenski, Liam Paull, and Daniela Rus. IEEE Robotics and Automation Letters, 2020. [PDF]

9. Integrated benchmarking and design for reproducible and accessible evaluation of robotic agents. Jacopo Tani, Andrea F Daniele, Gianmarco Bernasconi, Amaury Camus, Aleksandar Petrov, Anthony Courchesne, Bhairav Mehta, Rohit Suri, Tomasz Zaluska, Matthew R Walter, and others. In 2020 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), 2020. [arXiv]

10. Perceptual generative autoencoders. Zijun Zhang, Ruixiang Zhang, Zongpeng Li, Yoshua Bengio, and Liam Paull. In International Conference on Machine Learning, 2020. [arXiv]

11. gradSLAM: Dense SLAM meets automatic differentiation. Jatavallabhula Krishna Murthy, Ganesh Iyer, and Liam Paull. In International Conference on Robotics and Automation (ICRA), 2020. [arXiv | Project Page | Code]
2019
1. A Data-Efficient Framework for Training and Sim-to-Real Transfer of Navigation Policies. Homanga Bharadhwaj, Zihan Wang, Yoshua Bengio, and Liam Paull. In IEEE International Conference on Robotics and Automation (ICRA), 2019. [arXiv]

2. Deep Active Localization. Sai Krishna, Keehong Seo, Dhaivat Bhatt, Vincent Mai, Krishna Murthy, and Liam Paull. In IEEE Robotics and Automation Letters (RAL), 2019. [arXiv | Code]

3. Active Domain Randomization. Bhairav Mehta, Manfred Diaz, Florian Golemo, Christopher Pal, and Liam Paull. In Conference on Robot Learning (CoRL), 2019. [arXiv | Code]
2018
1. Probabilistic cooperative mobile robot area coverage and its application to autonomous seabed mapping. Liam Paull, Mae Seto, John J Leonard, and Howard Li. 2018.

2. Autonomous Vehicle Navigation in Rural Environments without Detailed Prior Maps. Teddy Ort, Liam Paull, and Daniela Rus. In IEEE International Conference on Robotics and Automation (ICRA), 2018. [arXiv]

3. Local Positioning System Using UWB Range Measurements for an Unmanned Blimp. Vincent Mai, Mina Kamel, Matthias Krebs, Andreas Schaffner, Daniel Meier, Liam Paull, and Roland Siegwart. 2018. [arXiv]

4. Geometric Consistency for Self-Supervised End-to-End Visual Odometry. Ganesh Iyer, J Krishna Murthy, K Gunshi Gupta, and Liam Paull. In CVPR Workshop on Deep Learning for Visual SLAM, 2018. [arXiv | Project Page]

5. Learning steering bounds for parallel autonomous systems. Alexander Amini, Liam Paull, Thomas Balch, Sertac Karaman, and Daniela Rus. In IEEE International Conference on Robotics and Automation (ICRA), 2018. [arXiv]
2017
1. Duckietown: an open, inexpensive and flexible platform for autonomy education and research. Liam Paull, Jacopo Tani, Heejin Ahn, Javier Alonso-Mora, Luca Carlone, Michal Cap, Yu Fan Chen, Changhyun Choi, Jeff Dusek, Yajun Fang, and others. In IEEE International Conference on Robotics and Automation (ICRA), 2017. [arXiv]

2. Hybrid control and learning with coresets for autonomous vehicles. Guy Rosman, Liam Paull, and Daniela Rus. In IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), 2017. [arXiv]

3. Parallel autonomy in automated vehicles: Safe motion generation with minimal intervention. Wilko Schwarting, Javier Alonso-Mora, Liam Paull, Sertac Karaman, and Daniela Rus. In 2017 IEEE International Conference on Robotics and Automation (ICRA), 2017. [PDF]

4. A parallel autonomy research platform. Felix Naser, David Dorhout, Stephen Proulx, Scott Drew Pendleton, Hans Andersen, Wilko Schwarting, Liam Paull, Javier Alonso-Mora, Marcelo H Ang, Sertac Karaman, and others. In 2017 IEEE Intelligent Vehicles Symposium (IV), 2017. [PDF]

5. Safe nonlinear trajectory generation for parallel autonomy with a dynamic vehicle model. Wilko Schwarting, Javier Alonso-Mora, Liam Paull, Sertac Karaman, and Daniela Rus. IEEE Transactions on Intelligent Transportation Systems, 2017. [PDF]
2016
1. A Unified Resource-Constrained Framework for Graph SLAM. Liam Paull, Guoquan Huang, and John J Leonard. In IEEE International Conference on Robotics and Automation (ICRA), 2016. [arXiv | Poster | Slides | Code]

2. SLAM with objects using a nonparametric pose graph. Beipeng Mu, Shih-Yuan Liu, Liam Paull, John Leonard, and Jonathan P How. In IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS), 2016. [arXiv | Video]
diff --git a/research.html b/research.html
index c184b5a..c637d21 100644
--- a/research.html
+++ b/research.html
@@ -1,8 +1,146 @@
----
-layout: default
-title: Research Projects
----
Robotics Group @ University of Montreal | Research Projects
@@ -13,12 +151,1968 @@
- {% comment %}
- Sort the projects by date, putting those without dates last
- {% endcomment %}
- {% assign projects_by_date = site.projects | sort: 'last-updated', 'first' %}
- {% assign projects_by_date = projects_by_date | reverse %}
- {% for p in projects_by_date %}
- {% include project-card.html project=p %}
- {% endfor %}
ConceptFusion: Open-set Multimodal 3D Mapping
ConceptFusion builds open-set 3D maps that can be queried via text, click, image, or audio. Given a sequence of RGB-D images, our system builds a 3D scene representation that is inherently multimodal by leveraging foundation models such as CLIP, and therefore requires no additional training or finetuning.
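
For example, a text query against such a map reduces to cosine similarity between per-point fused features and a CLIP text embedding. A minimal sketch, assuming the open_clip package and a map already stored as an (N, D) tensor of fused point features whose dimension D matches the CLIP model (512 for ViT-B-32); the variable names are illustrative:

import torch
import open_clip

model, _, _ = open_clip.create_model_and_transforms("ViT-B-32", pretrained="laion2b_s34b_b79k")
tokenizer = open_clip.get_tokenizer("ViT-B-32")

def text_query(point_feats, prompt):
    """Score every map point against a free-form prompt by cosine
    similarity between its fused feature and the text embedding."""
    with torch.no_grad():
        q = model.encode_text(tokenizer([prompt]))          # (1, D)
    q = q / q.norm(dim=-1, keepdim=True)
    f = point_feats / point_feats.norm(dim=-1, keepdim=True)
    return (f @ q.T).squeeze(-1)                            # (N,) similarities

The resulting scores can be thresholded or ranked to highlight the queried concept in the map.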

One-4-All: Neural Potential Fields for Embodied Navigation
An end-to-end fully parametric method for image-goal navigation that leverages self-supervised and manifold learning to replace a topological graph with a geodesic regressor. During navigation, the geodesic regressor is used as an attractor in a potential function defined in latent space, allowing navigation to be framed as a minimization problem.

f-Cal: Calibrated aleatoric uncertainty estimation from neural networks for robot perception
f-Cal is a calibration method for probabilistic regression networks. Typical Bayesian neural networks are overconfident in their predictions, yet reliable, calibrated uncertainty estimates are critical for downstream tasks. f-Cal is a straightforward loss function that can be employed to train any probabilistic neural regressor and obtain calibrated uncertainty estimates.
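
A simplified sketch of the idea: the paper matches the empirical distribution of normalized residuals to a canonical one with an f-divergence; the moment-matching penalty below only illustrates the same principle and is not the published loss.

import torch

def fcal_style_loss(mu, sigma, y, lam=1.0):
    """Gaussian NLL plus a penalty pushing the batch of normalized
    residuals z = (y - mu) / sigma toward a standard normal by
    matching its first two moments (sigma assumed positive)."""
    z = (y - mu) / sigma
    nll = 0.5 * (torch.log(2 * torch.pi * sigma**2) + z**2).mean()
    penalty = z.mean() ** 2 + (z.var(unbiased=False) - 1.0) ** 2
    return nll + lam * penalty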

Inverse Variance Reinforcement Learning
Improving sample efficiency in deep reinforcement learning by mitigating the impact of heteroscedastic noise in the bootstrapped target using uncertainty estimation.
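
The weighting scheme can be sketched in a few lines. This is a minimal illustration: the target-variance estimate would come from, e.g., an ensemble of critics, and the minimum-weight constant xi is a hypothetical hyperparameter.

import torch

def inverse_variance_td_loss(q_pred, td_target, target_var, xi=0.1):
    """Down-weight TD errors whose bootstrapped targets are noisy:
    weights are proportional to 1 / (variance + xi), normalized
    over the batch."""
    w = 1.0 / (target_var + xi)
    w = w / w.sum()
    return (w * (q_pred - td_target) ** 2).sum()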

Lifelong Topological Visual Navigation
A learning-based topological visual navigation method with graph update strategies that improves lifelong navigation performance over time.
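
A toy version of such a graph update strategy, assuming the networkx package; the exponential-moving-average rule and thresholds below are hypothetical, not the paper's exact scheme:

import networkx as nx

def update_edge(g, u, v, success, alpha=0.2, prune_below=0.2):
    """After attempting edge (u, v), update its running success estimate;
    prune edges that prove unreliable, and make reliable ones cheaper
    for the planner."""
    p = g.edges[u, v].get("p_success", 0.5)
    p = (1 - alpha) * p + alpha * float(success)   # exponential moving average
    if p < prune_below:
        g.remove_edge(u, v)                        # edge proved unreliable
    else:
        g.edges[u, v].update(p_success=p, weight=1.0 / p)

def plan(g, start, goal):
    return nx.shortest_path(g, start, goal, weight="weight")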

Taskography: Evaluating robot task planning over large 3D scene graphs
Taskography is the first large-scale robotic task planning benchmark over 3D scene graphs (3DSGs). While most benchmarking efforts in this area focus on vision-based planning, we systematically study symbolic planning to decouple planning performance from visual representation learning.

gradSim
gradSim is a framework that overcomes the dependence on 3D supervision by leveraging differentiable multiphysics simulation and differentiable rendering to jointly model the evolution of scene dynamics and image formation.
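
In sketch form, system identification with such a pipeline is ordinary gradient descent through simulation and rendering. Here simulate and render are hypothetical differentiable stand-ins for the paper's multiphysics and rendering stages:

import torch

def identify(init_params, controls, target_video, simulate, render, steps=200, lr=1e-2):
    """Fit physical parameters (mass, friction, ...) so that rendered
    rollouts match a target video; gradients of the pixel-space loss
    flow back through rendering and physics to the parameters."""
    params = init_params.clone().requires_grad_(True)
    opt = torch.optim.Adam([params], lr=lr)
    for _ in range(steps):
        frames = render(simulate(params, controls))  # differentiable rollout + rendering
        loss = torch.nn.functional.mse_loss(frames, target_video)
        opt.zero_grad()
        loss.backward()
        opt.step()
    return params.detach()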

Collaborators:
• Miles Macklin
• Vikram Voleti
• Linda Petrini
• Martin Weiss
• Jerome Parent-Levesque
• Kevin Xie
• Kenny Erleben
• Florian Shkurti
• Derek Nowrouzerzahrai
• Sanja Fidler
+
+
+ + + + + + + + +
+
+
+ + + + + + gradslam + + +
+
+
+ +

+ + gradslam + +

+ + +

gradslam is an open-source framework providing differentiable building blocks for simultaneous localization and mapping (SLAM) systems. We enable the usage of dense SLAM subsystems from the comfort of PyTorch.
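
A minimal usage sketch following the PointFusion interface advertised in the project README; tensor shapes are (batch, sequence, H, W, channels), and details should be verified against the repository:

import torch
from gradslam import RGBDImages
from gradslam.slam import PointFusion

device = "cuda" if torch.cuda.is_available() else "cpu"
# Placeholder inputs; in practice these come from a real RGB-D sequence.
colors = torch.rand(1, 8, 240, 320, 3, device=device)
depths = torch.rand(1, 8, 240, 320, 1, device=device)
intrinsics = torch.eye(4, device=device).view(1, 1, 4, 4)
rgbd = RGBDImages(colors, depths, intrinsics)
slam = PointFusion(odom="gradicp", device=device)  # differentiable ICP odometry
pointclouds, poses = slam(rgbd)                    # fused map and recovered poses

Because every block is differentiable, losses defined on the fused map can backpropagate to upstream networks such as depth predictors.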

La-MAML
Look-ahead meta-learning for continual learning
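
The core update can be sketched on a single weight tensor. This is illustrative only: the actual method maintains per-parameter learnable learning rates across the whole network and adds asynchronous updates and a replay buffer.

import torch

def lamaml_style_update(w, alpha, inner_batches, lookahead_batch, loss_fn, meta_lr=1e-3):
    """Inner-loop SGD with learnable per-parameter rates `alpha`, then a
    look-ahead meta-update of both the weights and the rates on a batch
    mixing old- and new-task data. `w` and `alpha` must require grad."""
    fast = w
    for x, y in inner_batches:
        g = torch.autograd.grad(loss_fn(fast, x, y), fast, create_graph=True)[0]
        fast = fast - alpha.clamp(min=0) * g          # differentiable inner step
    x, y = lookahead_batch
    gw, ga = torch.autograd.grad(loss_fn(fast, x, y), (w, alpha))
    with torch.no_grad():                             # meta-update weights and rates
        w -= meta_lr * gw
        alpha -= meta_lr * ga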

Collaborators:
• Karmesh Yadav
Active Domain Randomization
Making sim-to-real transfer more efficient

Collaborators:
• Chris Pal
Self-supervised visual odometry estimation
A self-supervised deep network for visual odometry estimation from monocular imagery.
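
The supervision signal reduces to a photometric reconstruction loss. In the sketch below, `warp` (which projects pixels into the neighbouring view using predicted depth, relative pose, and intrinsics K) is a hypothetical helper:

import torch.nn.functional as F

def photometric_loss(frame_t, frame_t1, depth_t, pose_t_to_t1, K, warp):
    """Reconstruct frame t by inverse-warping frame t+1 with the
    network's depth and pose predictions; no ground-truth odometry
    is needed."""
    reconstruction = warp(frame_t1, depth_t, pose_t_to_t1, K)
    return F.l1_loss(reconstruction, frame_t)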

Deep Active Localization
Learned active localization, implemented on “real” robots.

Collaborators:
• Keehong Seo
diff --git a/screenshot.png b/screenshot.png
deleted file mode 100644
index 3063a2a72b1e60d88fcc405d9a834a906c4828a3..0000000000000000000000000000000000000000
GIT binary patch
znv7Iko=y)-1^pB|5=rFr)&I(uA2M1TZE@6yCj(XdpzeW+-utNbtcVnxuQsFTYL}@a zfsTaCra18Dk6i2_mLf=iGWXf+W4=@IW5PEJg9wFsiauBY zYgo}U6JKVD;^e!&2h`ha!|1k~<5$FZOyLk0i3CT7kez~}(HSZy7EO@5+n!7!NcVRj z^va&GzyFxM7<5Yv#xqq0cP}b2oWZ0t330+ekDWm$;Ir?E67Ux#Ac={t~QRNx2dKh546uXQV1pG-lMcFIK^!uoBUQ8UD6s+&O;B zY=!BlZ&+Ic^9#%)ad);T9MO$11xZB06E0iqiEw@A-nT^_lAig|4|WQZi~H38(A~{? zz({E8V@Gk36S_yP&F9AnbJ?LlV4(Hs2UEr5CWZdYRL3+EY<~1Td^P6x`?Xn~_ z&~Mz+v3$b!V?`AXF*tskCo8edL`LSM$H)l?mZ-!@0F9U!^0*k1PpOVIeUrl_Je-A$3aZOga!_i~R-uyNoVzARl^3_W^BFE3&oi}wc za2+d|)`TZJ1Dei2bvW3H9gj2Pe}<%v?+YgIHD6=KOPC!bsNE-~CH{5}EE?4H@_4rOmBcm0)Z>TbtEKnCjtmVbDBmv1zc;GUpHAsGP950g1m?R7wLAd%@7MU1I~iYsSL`7gQWD7mj+%(+j9K0qpYcOAwdTs&~|^4OLN|_b>n71`-u{Chw?v;1rI_nIw;{d2WpCh_is$xKW#TQs91w#!GGTI|6qPO zp@NXE_QVkLUHLm-@jui3uP*}}&j0cAfBu2mBsjd*H94?Tm;Y(4J{o@v7V_`m{r5ip ze;X{?%JBYwrfCCF%EVAfCxF+im}zl=itw1y*n06-TWw-H@I-%FfhC7a@(I`c?C416 z@jufC4EtDPDz?8FEKKgDgZiGfU`3Kj<{!=s|4f#9WGgP{7&A7EiYEE8;#omGyZ&CR z=l)wDJfOfi5!caYPw@Y}Siy!N-NI8k_ZnYvkoefiaQ>nCn%s+ji1FFAp^no379#<1 z6J>9IT6&*}(_*%mmVb!SFVw%pC-3~X?U#QeYo_2KN}ARnKC9?Ebr$b>SpFCCEcY*A z5$?jRO!*rT{f}-Ux=H>Tp76nFeZ~K{cTWD_%^muiVEuJ{6*SW{=0>t_HS0^NRe88u zWR>aYioH9`HP#{U*K0o4yEydx-;y3I3=eL`?mOMm1PkI0ZtH4nG&p;`_j~m6FSh>x zg78tiisVsU#P2_5kmIk>8c*Og#AgR~2dK?5+MznuX|DcXJ^EmL%*#iA9WhI7)tz$H z0lSco``=SAqWh`wE)=w!m$g~H`nvLJjnWdnF~|EgOFHKGMfo{+EwWTJVcY#ZY5&=6 zVj%QdZjj;Nh}sz##5F_3ZsQ^AFy?hyH0nD?C^)>Beh$k68mZbAF)%#KcK3JJB=FgC zTqSMxzd3RK_s(AUfwrLK|5IOclA(c?QtaF9eV(`ao_*QUnfok0*`ld+eNDil#9>wn zb!i-abMNQIYKO$V@TRRv6xp*LsVziD!P40Wm*T!7z5YmmE2>mxY&Xv?_nU809{~jh z-4HMv5KyC`+C`JGS#V-u5GF6tL1cP1C3 z_O6EpENya)V|*wW5`ZS_Oo!anlY7tQv_I?!iY?@FKr9P4R!CzBRp^hIOn(T{1{PB= zoT9e`{DpO;!EQ(RYs*%t$}N_f{tscS6{P$)g+1Rq?l#)`#eN1n9Yu2ZzLuQQ%vUeY zI?E(Xs+g#WG#iuR!$(>z6UyF<)V#|ct?RLVI3MsY52%JoIvhj@-`YRh8XHgsXXk$u zn{!EQs*%Am^d3jFHD5$s2M3Rr-K#agf85dTi*Fn*ILkqMol<*MKHe| zCsmlSnj^a*%P{cD#lWyOE;DY9^|`C0aM%&hT4u02e?Yp`W&UxTC52lQ8kF?;xgJ98 zVlG&X$Kg8^Ktg|4>$L_@{;>ST)KGVFeD_i}nUrBT^?UiV?C`zxSJFqS$jp81(ah!j zDAt`csG~gY3Nh)ob#p}yd!>g*MG6d%$6RNf@=F0&RG{LMF}s$$Oza;feO|87uZ-(y z*2pxBOWX4gMptoXw7ut|Jgo#Cj{k?aet=Bv$5HMUEOq)ld7+H->!fpae`45Gw$QjN z>raxw?Y2Ehw?4m4?gXj`$P?!~sF2J}rYH!@vdWoN(KAqW9}c8eqmU&q2{3hXi1Tql)3Wwc9KxFcAX;dr`NOI)qT%iKyYrM|Or?BQM`O5zsH zqfD7eqrWVk7xuLeHvg2B9TdUW-VVNWE*-7aJk%z*PfHG`ObIlx#!;V7reLL zTD|t_y=S_2bye@*?yjm`)lJ?t{!=XP#6e3r7V*bsbxCHdn?v;-OGt{rdq+Tk`Ub4^ zQL<*kM3!nVtKJCA9xh5CGj1UC!zi~QqjHC|--Y^wx;Q$S4h`NhQG&;&9u2daty=}W zF2_j13Q-Zo!Ynnt#!%Gf_$X%RK+CG}oNu3s3&)B$EWkhenG5bu=nF|Ysp*0wQBwLC z`?qe*Ix4=k*e0yFfQwv@g;d&g9H@Q^_1hN^Xf>967SM0mL-ouThK}kE$yW?TUtBo||*8&yx8a80p-iL~WLGb$& zgS4ZK3L#)>)R&AgK+D6nbd(dY&v#OaR~<)>1~j+@cQCHl)WlGG61}$ecE}qeQ?Bte ze8s{$fxkMdLNwUJKsgVm{2f&p)XeiKE^Sf30L}Rrb5)GGB$%_o77+XOYFqnb*7luj zZjHnj{q_Hf1&~?>*XgOuC|78L6_(Px+li@ixe;IfL19dLl#BCdQnhP4|D(#&=WtHB zZA48@;Or-ulCt;%Tg>d7LhwS>eaGG?l9NJ;+$M z5%4J^Lr-`V^19`u1{9uQsZzNPDDEp#FstUFkWFFjdOlY-Y8w;D zqGSwd{HORf!8l!R`{23J&t;o?fL}PC?hrqu`uC>5sZ84tde9L;@@12V!NW{&b?nue z>O*9>B_1=*>lj@6W3un=6ZWGYCgC)LX{|Ytpt6k1G-KY3BUd)~@y3(i1)5WE{C-zADB=1wbPl}wLS}sQnl#G}7 zG|p%QYg3TKGBT(DVS`@@rDHyqM_M|&btGeBq--$lm{3$p8we$4Ad1?8X3{k~y9A7I za}OB%CAGEL&oxXs4SKun+rK|rQ)at}7>tr?yqh?6=A4O9<&b3~FePD!yZ*8AU21#U z{#!#Pae0nDBC2b{u1F;D%5nkWpA^tdg1_zIp6S9=iY$uxbpEXECjqnYA#jF8Wlza1 z7jnq~$J;60^m_Yg6>9lmF``FLq@>-V<(zKUZXaXOD(KtG@#iuhpL7M3U1Ak#(K7+v zuY)>8h8bq?Pp0K7TuWIsL2bOEAZ+@gDe3b8v?^!fRrlsdaGneL5q%X;>20@_dNB?S zrlXL?`?&`=IVDrH(fH27M`=Ki8y3TOlHp7Wo6cI}pFp3=8`1?G)~yDJboFB1t-oum z$8v>_BdjGZ7>0vnnA`y%f#T;LW$8I&DJSZ8Cl)RZTy#K-c$j7a*f=O(w@%EB2aMKa5oVz!+9*a^8@Fa1y=2-P96Nv 
z*`LO0Xc1Ec+@?&Y$|Hiu-B&N|Co`pvbMmEMAMj!|{crO8E(=G41ZC5P+%6CLD?o}a z+#`i)*8<)HG1s^~IA8Z?;DPC|qJ|z{4HfSR*0Rs*+g+goAI>opEX?_BuyS-&i3iTQ z7KnDk!oGvO63>eHOKG&4+8p)OeP$3orl~;!@|n*`Yf2zlq5QtiAl&64PMK*-MV3=^ zai|t#dY$x>nU)sNF=9J`o*1+-^qY)(1HEQuhdM!Gn04u9$~Rn1LeJF;STA6cKj`vj z&jsF6ZQ{oxSr)=VthdB;9iY6ba5rmD-zj{W0rJjzsEj+l1%-KPK?9_CM)tG8ea&Ar zY~%n$waK#)Cwd0mq;fJX=Z&o;MbI9*Z(%oYcnnHyUvG;Dm zT-RsY5^Jce zlc7lpsBB6CG(NF-Y9@SIu+cF^t4GN1M2;n)woqi4c_vI)18QlFT6xSoEPl_&>JJ4J zi&lVo)CY>8uzA{i(D`g7#mt3!L{78TD1BtV;y?B9*5y7A*V*YPx!*ISP%>)L^?YyB zHxFg~PK9xno7L141%+p;*R90l*DPKwAf%!1(}r}W)hzImyF=}Ew(zmPyuhx7`XVBFgll%kC)G>I%YA=b=h|;+F`DoZ27g)GU*z$%S}(drqzkL zz?M}on4W9wu%|3VhlydG?+oox!G^aRK&!p$J#0Sup8d%5(^1dO-R!7$BYjWnrJ6IX zdLt^ZC07^LxQlo9I<;X66)+Ce7|Hs$K-zZt?!B-53ERU%!}rvijhj(!G^zLszC-3r zKr^=J9~X(<5^o`k0B_@4n>va1j`vMV$rICr!yS2g(3rsUCs-_~wqgy3YX3>%j0)4( zf!f!u=rz??YL+;7X$0)M8F?eUigB?8$7}JLjTMvHn8%7B>ca6v5p5084%7yV&~^gw zyg&wZ!`Wd!M~r`~eY%xYO6LBO)QOGF!fvKH@Oi#-+bXb)DI-u}hzqk;P< zfBl~So>aeNX40;8L6PtcJGb;zg4gFPqiwws(%*^p4lXkFnQe+Wdt?qj!R?kb1w(?` zsMdiq0o2*SIpBUR{|w&_rI+i^R*yx5+F)G@We3K)@t*f1+|wNs14M4h*e;4tl&EJK zwhQKVcL19rg3WPAB(DynKorZpl0IUmqx|Accw8dMkNk8DOo_va49dnEN^1-i)`bc2 z$)6HwViV}Y3UaT}0+qr{p~Ky5>=GG%`;LyKNMnDg{6e2vX%`y0&Kv}g+2xbDVv`~j z9t6~s*GsE&4t;F#=+?dIe=)tUhNjSKE93u^R9KxkXP1=w=GTP3T95t2eXOQW^tm@Ai=1tETj_NYu(%ev*Uq7c&@X*SD#3b{BItc8i4cPS&2tDaXy~ zr8Bqe#4G+ca4{H~&m#O_GCM+NcYSAqw(bek>oOFMjAoUdBk}S!t?S*+SGyA|00Sxv zq^Kkrn|T4G7Fh)J#&1d(OlHUE>>}^K8n~GN&zkO%-Ulu5iLrG5>`$fH_QNE&vyf-| z{?-HWan@gDh!o9Iv~sO*}zyFy!v(ldSG(B?v92h^@i%=${;Ca z&eMR4@qbwS?xT=V)TIBJ+olu3sFrhA5&>@(9&s}D+Z*4?G}_2?88nd|(aK}EMd zGhZoxQ$OG`aEncUwl!;6ydri~!^rQE+S523o8G4SxPumUnB-*hqMO3ab0soNOw;7H z*X!qI49CojOIHoShM}Y4>3Cpznh?4{#aOES)+Ouc;R6NPKIyZ4EJbD0Np->lE8*Iv zSmAux!^-fMA3+-5R9tT@=JZ*)D|MU_wC`*P|BS-mjpx}%-BwqEQxFPCJ~x2|1*{Ui z3ibh&iVJzn_-Y05(wNemI5tedCx!5Ik#^O$!KSnlS=Tb`!8PXXZ^J4!x;3v?S9_jo zZ)am$87%ji5*EG#P{$W;>r!_C*Pf_Tw>Nu-{PkLIX zGK{BXWb-k$lapDh6FAS1ow>STPv3~wsKc%9D1q${jB9pC#4Bh8Of z=uP7{J#^Xwi}2>gTPl0Lhx6T8$9b9W#!&J0?ArMqcVxipv@Y?jZ|+s6J1&|3<(=~j z;j{O`lbGh+4zSw~=LsYic>hLwQFHoQyhM1><+>~WtkZWM0KiTQckV*J7id!{_Mo}csp(h1)Tt7PkRNIGB-kH{myL!$kj{t zLHs&<%pEh7zh&N@y?j856BetN`vwEUZ&!B5mZyD75xWJg71dpEz*LL$c5mF=UW-BJ zn^sq012$%s#-b++|L}}O5-$1*%o)083xr1Onw@^P8g!`_f4^H-=?bz(t=WOJO=x9cf!<7{_i4}hUSY4zUf zV4L?aKq3`XAuK?&l+?L25T6Zho3QIa!|qWna2^ zY6AF+BV3z!z?F-9(&_CzA0O3AJo;;AXk5 zIY<-~XDs&630y`GX>xaHJX7L4xspP4>i$tls=H$poQe3`&rB{EIX&U#xwVTf@@#cW z7WOFwp@W~T+BTQO!dEvQT2PGavan2=L6CfbS-YD*$o4ZV^n}1VlM`$g3m2pSFr5QX zyoG6}RN*rBVD?vm-(mLa_Jr18pw5ALMtmdYFXz@9*8(eOxr$i1#k z`RkP)bB7%dIu8}6HX1CO?d51wD%2Vbh9BqZ>!G2?s@7GHx6=#FL6Cbe^@I;68JJ zIiHVGnRg6|!hUZpF%x0WO4om>eimi(Zaa%g&y}XDy8t{x)^v{)b3q?vuN=69V-8lv z!JxA?&84nxw#NcOko>Ouk*BBOVqaH0WOkph^49fFDBR3AXunla;5Ls;v}T`WOWioj zCb^r+nyb2R47n%>X@}Yg&u0EnB@?K>jKB`s93}kXxF$c<1;;zVW-DHuwiU?wHjbaH zZCF4hAF%CJ^pp0Bmx~IvgSwJawvbw7Ia*@Tjh_JxBleeH;4g~)JrReb{m_-$+Nm|O zRoV(uFWYjpttnnJ)jLOxvdu9S%_+_862VOhtHlZsm!`Jm4z|Vnufv&2P(E64OY;8y zeATa!a{aNd7V=RglsdykJr-SO(W_^o?nQFgc8f0H*`%zBi<~2V7Fik&?{ACyetMNl ztDJgOLyudAp{|M$MI+Ob8H3Bmi(knbqgH-EA*LT+8#}e57gVYyE7ZS#SgN;gR*)+( z_Kk2QTZ)Bim+ z<;I$Oqb?Yw+&z>sY()4j4&$#^S>~SlHbX#J?Rys-6OZ9!clNo$A?*F*ZZH!kT|Z+V zFMXC|j^LuuBp7fj=DHSa(OOI8rf`)^vqMQGuWPN)s zVhg99M>&q4Zrb8b*RYigbr$-@V;9Ue3TXY$m~@kj(4hbY8eWW=lR*6{?7~hr>(Z_I zeNQSS(k^=+vG9D7)g5PJJRRR<6O`jjC&?a7w} zg^Ebf$kp=w$MaS^-mgFKLw^`#gd&M_ga98l!3A(jAM^AR5nJX}Du@0onG#g911-^! 
zOk5*zw=AMeQmUti>ZZbE7emx58oT!o>ou%Fiz_`%vaKLhT^nxO6Sk^8?IoWaau4Ol z;rbVL?+Sx#Umeg_$e$T!A=F=Ne z4UElyfbY{>sQlV)h7jdvNXIxa2wx>RQHJ(>rMUU^)|S9~vt)@mPiqL7`9hvL~JCoIy}+1&ef8}I1{n?u}F zFy_`U_H|0Blb4fbJ0`FY*Tboc?exjv6%bjK{P1eOH55|!Y(VFsqp$bLJW!8qa>bds zt1?00oC(W@zp*cK?cn&_xbi0d0){{Jf|Yr-3;S63nogTPfdEFdEB2sHN9c5SBh!*2|S>B5fCJqDo%Dia%C7u0(tmg#B$&OtIh1I=Ddn&GakF0x3P}yYqVpRW- zlv{Z*ge$o8F{RdzHbN()EN8B6;4_Tcy|#h{ibh3aghaNQPSq5ja#|f+^VA%5`iK;4 zO9Z&PlyC5d?+SiJwtGvrJA9UQ_f1BGhYeO^@4|?8J41?g%!vYQIg(eC0yx;DeIS|d zLvNh)kn1c*Hi7Su^@gP2>%9s-bj2RPJpohaQ?wxBGf3cP^qx~6XP zcWc;L{AZrdExpLvnTqp7+9fKyyHD;eyS>)(BAP_?DbM2Xh0Zn3h8Mw8W41kWrd=l$ zgGco0{n$Yf*7E`u{s)AnFD3BkFJ}fjWWszp`e}m87m31SfGXtNR7sIrJ!if4mb2wM ztn+>Lc=m@MQB5hl3}W-^J({_(v{C5zB*O;7JXt662iN-gp^tm%}Zf$Pw=xUHH2s%}1AhDl%;}X{8p0(EZ*uxvlzfo)`Ys zn%waB6E&HlSYnQ@vZa*ZtB}K~=eLx>dXDW2eIMVm$jcT+>~y@%UJY?gK;EysmA(_B zp|x(CTb#pqf3&AySw9�lSUw2RDCI4#FBWSMvpvuse1__eXBx51-gq?w&{mZmwkM zG;!{A1c8T}hrTm8$xu%@bSIVp`|&~)qQbBHRbAgjhEDvC4Bm(gUX?=S4!EF5?7G;N zf|~{Lgs4QN*`|63ZXQ^L#W#m{9~ySTnEPL5Y(NiJDFXiEIYin;%crUFOagPV>d?O( zE=FYnK?-zGO@#h_@va_;Ze?yXwohr>`PB2wdh;njE(7zmPCjGDlh=)>pv0jY8`GCP zUW52lgBK;cxkWFZy$cP~hjL)VyUuWzw>O(F+pD@@ulhQUWF+uUwhf!1E^e6Ovcwqq zwk_*IK%+bAQt_|qHn3nCv2p&wXeSDwT05<{Pszo;^E4Cb6fb+$;4i7VXSg!6JeWxUi*f(%H zXga6=_Fl;Ek} zIAm&Pov)5b(mB+mSv}YjbEv*K#IXCSv>4+Wh8NkQfFUUfO(omwLoU`VB#VdUHtZi( zBjo?#^&+N%_00=&WoXpnd^WPi_@yTBIre_L&LcPZ%-ww-@;!qyds-=Vx4$ndtU=pE zG{61i0Ljpb?Dy9ZL;-tr4(lg#4>=bf(_lHd9;nzPuZxd|5zh;`okv$3h;Oab!;KP$ zH?al-LBc-R{I38N_QZ40Y}kHTc49TqfK;A%-VGvcw^L+8Bv-TU%U;-O1e2o99 zI-SnvtNCjRA|Fw)Jmp}>>~*B&aaVxD=&VYfgL_JkBNeDPiFW2)Z3$U3#Meo^(MgcTh8ZI z9Z@l#`Tay;wbQuXb8MxDt=(h4%9rUX&;3kii5e|P$n6ubdTf=;qF-y3(9lh%z33h7 zTJ>Q!h}P96q(=>A6YfHi`v(kDWN-VMCCCDp)++soAEzYiAfVrkGT)b6@xb|fK2%< zhQrDGbK>=LhsX)12|okM>EeoqGDc?nZO0$s19!fq7~dS~AEh%$6&<`WH_&@bHM87q z?QE(nbvQyH_ev&N^P>}Bt3LGiC{sN%v!a7V`4bshB=k{eb+6?=ULt7 z)mP&;m26sD37~cNmp^IN9N|hmIwxnCvu=X@?xn7)d;+N6G`-ajPq0!$)7vbb_RjU> zGrgbLr*f$qtZZTQc+S)G?6R^rjy z<4CRq&LUwUQ^<|P+^4oK(^+;*+xXz7$Ay(v+Q=rgrLx7F>IJrn29t`px{-Fs2PC4D zXvGlkJCLm$zPT>ybE!!KFkTq#Y-CsO5ahygGlUV@zV3$?)AdQ+qRj!=73`O;CUB5) zEnckG>$c75_RVyBW?rY}_rqetP_Uu@bF2pFHYL31*&4R{k=ab8G3=FEO4EktL;vcH zZb&DRnO>hPtuTAvm|ItgNr79{nHKswO6u`iHfxMd<&kxKOS=!)%`D&Z$mWyJnSg7)3xd0=ucr;K!I>lW8rqvUPTI(#$*Uq}9eBzdIKS#~ezz*~7u z`8tovMeX}kLawOs41w4_b7Up2P%d%8z8O|{rpUms=Q=CNsQtUh$)Hj1Gkl9cpuNK(2c(cCQoZO%Ttz74F+`}{j`*E!IE{Pj#JQ@)9CAw{u}n1qOUN%cP*xX%COQ*>L9D z@!A7!M%W0y2+7s?YNnCjaPI$J8~?dZxzMmHJ9zI3l8}U^juU_}Ei3amWb>7k+h+Ex ztu3M!frb1iFNZb>hg#Io7RLywrLe@nu-499WAEB_w|E)-A zLwd|a;)!=W^&cE^KyhCktyW2*0$QfQ~VK!`+`=0U%`q@0a)^^M7&`CaktG?LS z`fq0Z_t6k5uhm=@-wFC^&SIoqJ19robl_?f1~L+NQdNALRW_suJWUTloLJ+dm}n*C_@qR0Nm@ijC(l=KQasq}zb{_oXNz=(+0cT*iue z^J0Co6KC1K1o>BCfkQZFG(%B^Zut8ggD7zQcz#)cCa@+;Yc3%#iGlK;y+)BiNZ2W@ zPW$#3c~V}`{yH3Zjr#qDc?<{t8sQKcgqpf_fc<0G{aeDL{mbGTw%dG5Q#Q&=z5%-Rk zgXqh$hKfq5LUwyXM1TFi+Wl1#!3gAa*@Gdu3V_K%P}1ec+`g%Wxvxcx459sB;!a3< z*AYT%HQ2EHpQ8IoCE}FLlM?mwKjs3tG@_F3|8MnAdHjEF{15#2zkF`6@H#28mY9BY z-0Fk9zPsDW_SlqGhhgCUH|*-=pJR=mMWkM{0)I3QU=yXZiH#;iO_#R)L$r7jP^`v2b? zSm0Fpf$5(^aXYTAqY1OS(o)}A$TOYCzc@19rAT2j`950c7Vniq32R;<{xFg8w zL6FPM2^~Kkr%z1m`XXjS8`_g!*GS7s`6RL7UA(y=TJB91%Y~aig;S%;zQMD_N;#d5 z@Q;T;gbg#VRiR|nTuU;m_u_?Kxvha-roAP9`xeiswTM(lw!ZM5XslTOxYhd;q0g5J z55eeyae5UdG}su-z3)4Ch4YGppRJE-E6HbW*mjR;$n)@`4LXU|iloieP6x0-SMZn? 
z%xJbN!R;l56h`&6CDpjaZRr&I!*ls~+1Xu~!Z>0Mw0-qoHJOM|05gUr6;C{uM22-3 z{JEPE53iCDc=FxNA(fopAAio`3%5&kt8bz@+|1FTi*XSKe4V{6@O;^@WQenHu`03i zr_O<&Su>H6fcHEQUwdWzwYoxy>cMiuF=8q)(J{ZKUDmRl|0aJTP>cIPe(<}O&1f&z zv0zNH$CB~JoT6c1&k4GPkuT`etUC{uodXY`uT`3y7ok??yYoD|EUyeqJ+h6mfdO-m z-Y=lO8N|%&#qWMH>+<-7RPu~@z~`XlgO#f6uy52;yQt}L;-ILzt05)FwB)8NGaL4p zfkb>D=d&Ru?YhjYmuyb6wKkBJ1OKGKgh~Y&9A2|pmB@~hVw6+#7l%zWRnN;FU97HTsEH{i>qdPrr6gWSp&5^~aW?bG@L{=?5_Ul=JBAK6v@-Rgl?ki^ zxoHCByBHGrDrW*ARV5BI2b6y=F==#S~ zZM7?uKz3NI%NiF42`4ZI4_DoIBE#8K$00jMf%v;hI&t7YRv+7E~ysnmU(3aV94OFPJ;)y9+|r) z*-S7>V_Rngp{=Gm`Hj$Y6X(fYZe=x)p>I_+zpWUow6Okz&#t-Z#P~O($gtrhSKjG_ zvsrTW?{esdOwYS~lI7Xukn43b%@$jzRiBGe3y_J|5~-Es7#-@M4VR(3mHkMznVF7I z+A;9wQMP(rr(bt=?WIA2tAT{y$(<%W5CWN=Zu{B==bUQ1K zRV4+hV%DtQ&Z(l|@HF$tCA%aeg7AF%u-Vu5cE*mn14pN>iaH3`jVCGSoI2&r>36l^ zoku6h=b;?!r?=*|-oJ1q7G&B7ou@QX_Mg_w=lAmko(Rd%8^Mj8NVd8fF|dc`nW#(> zyY86BQ(Gex6C>KL3a>i?A3og(YHS;ka#jj(Qf_1p0fR7v)cq)vXRKuu{J*`3EA%StUZTcZ zDCHCp^$1*{fm99Ey62Ncb&!z=lkI91NgKMopU|W-o1`8i6u%gJq*`S|GKubMH z50wZsalldzDlX+?AI8UC^R@YM6Grm#(a?7V$WNq#h<>NUKD-Zssmn z@W|USTB|6}uP^AnU>S1}=P51DZ&LGZENI#hGG%_3lblMoW7d3w!goslwYmcPNu0XE z{xgF<)ej%^6!nECV@_^K9ruC)P;FG#I)e$b+41fXl8FHGK!Qe^H_58D_ROqOWXj@| zWgKf7&n6wS>)Ma)1F-d%lOxn>_)mTe5w@2)m0b}1I-~8A)$_K3tJNaLgWx@RW zRyg}f#`0!dTxt&e^%0yS07QSqbk?SV3z3F(qQ_MSUOeYZ!$!c1e2o#bFJt@|QJBju{b1YjMpD zpRu4S&du}x_xaoNSkvEO=HLEkYI-B#`9n(ka>)qt_qsp!%n0m zJhH^@gJxGFatB&9B;F2QmEM3*=K>L@d}_`37T{X|dle)-hg$CB z-{H8Vx93o%xrowIiX3QT>1W(Iq74y{lMnZcZkH<%4dE+qyn0l2p9Z*Y`JCeMX515)V@IW~VMP(D zL&sddNUyaY2w9`*uRjek8{b&h{n+S&9S%C0%HhV$3i{=3Fn3*SU2N$-hms_S?&h7F z!gbAIJvNRzyHpDs=I#6YnTUDP^_^K0Q5-XSX2-^1nt@V`Oxr1DPW8C$?jAEvg(WWT zUZk3~7mJt+>SbMdNi%NKYwopk+$PB?QL$zAU_G+iLN_VySC1vQ6DHy1b`rU#79e+> z^Uq>I2`bDNZ6zJ{&WPy`M%ij@8*dB%eWhTBc?}|sro>J zPYs5te%jhfF`-%0kY>px@-ZC2uKpZ3oZX)Ux*Z`zb{p+2&>;4+8WBG>Jq7=uf@C1C zZKL-$7fu%8-)3@mFB9wf8ll}KXb<%7#ojUr>1=idL9+oLJb-r3FS9*HduR6RT?H~; zX2n~dscOGH^nrl9;{iUSRHMggAEMnXe~C4nO@H^4gQor?7LZ~?`?XU6no5M6o|{~o zqYV;ueF%ky$$?o6FR#W;_%>Ji?k82Mrj^dFX;}D-^O|iJp^B~p(GjXk`mu#sK}Q2N zsmV5zTDP91z7q9!~!d|zL<>9_!Hu`cr#SBTF zRi$X==Az$J{^!28a>RDlVJ$@nkXTWx>byYJNVW&7rIY$g6$Zv+`zNS4f>mCd3&pS#c(Ym#$dB~Li96a4F_9d!4I_8ELKeQ)gd zb=q&NaK=`3hAw5fJi8pV2#WC9T5WM6dF&F5VDuiHKQ9{XxJs#bEiaXE+PkOU=5TA% zJQji5SJ1qIN^g)x*9?z@=&Y_7xek&@P1V##@I4zOkWY1jDnmb8_4^+1QEhg9D3#3h_TaCNMBv~!x9oGC=yRkGYup=l^o4U36rTl z|F~1OX{IG7?t@-n55MsGQYOAnxq{Z3wt9_mN+3^hQXY7h(Xw_R*ucR|8wbDQBHfa1 z436>{u}Xf4F!m&0(KCno42qO}vhjEHA>C6OlPp|CT34f=%84-E-QP1_*i*Y*_IYA$ zjAY4&%WtT|ZX@TF=f z`5E66;Z!{<`1fR;P6R$60wvblDjd%d8KWPxXt#m$)6ijpi#(sF9UWJCE%N0MTIy{h zA1Yl;zeNN-ko5sw{Yl|xaCgyMjlr=g{1Alz;z!6fUnwgp3MN#l_m1rYQzKGpRu*hr z&bpt6o)ao=Qah$SzR`Ze(@O!Kq{mkdsW_XJl^>kq02bjI)yYUg@aoS6wUSiMCQcqz zy&&~Eng@maW&GhI#DVyfu+Bc$EIg`xS2cp80U1-x)1pX6g*Br_QtK}!vdHa4VD})p zm(WKETOc@RN~f z!xO3AvbL@vrC^7{%-0n*t}nthQbg>a@{@LiA=0&{=j-F>C9?jE) zy2s35wMfif14U;X*-gsDF3y#V#;};YQ=kOfkbm>q7dAn?275p9dW*jE!!J9lY&$zq zF>eOCnh~=f*gD#L=6}vcKqJDQfQ+m>rRSxg1T7|ELM+b?QtB301K>1TZl#{ZBl2H{ zCkIo$4%}K^VViT`n~2rSNT4{}!r^008Z^CLF&%uPR>HFqqR$2f)$KR~hDo$^Hx511 z^c&IOi}&Spbof_u?Gi4o-LUi->%c!?E0=k&7ntg)idU3qtRz3huBYymmj9^_B^8hI z7fI(Zy}phj`p5wlwnM?5dMSy{KhK9?AkiiGd;;*V;lW`#HXCh1fhRm?Q0-xQ+pr9d z((kT~=Vw7A8{#AK*1`X*L;ukpqRtAHf#{8X?c&G4NCXEUfA*erzEm#yhNDy6KtSK( zMy^4lf#kknxxR;OeA%a~o&(OsY?h^AwVD00;CsWfuE)guY`)|uJ0fi%bBLQ6TOlTO zh@mlOmaj>x9B;q07%5RFS7|1H@7k$Fn{@Pb(oM^8RE%CnNH4@ay~uiF2$jyD392j1 z!_dktC>Sr&$lFqnROiK!(dSLTQba&%r92GO@q`UC99W;A~#CWb~@U2G4 z@ej5`o)y!mR5hofhPpw$SN2rn@ctmQt-@-#A*q^KzPrYsYFf1GDqA+7zH`b5YI5m6 
zGLkXp3vGMQK4vQI^0LvJFeBv&O<|EfP*k=)R`3RH6pEGWvu0luS_+I`&}8$fz=^m(l*Ku+&G(q|s#wWH84U{OGq{+O{d}d!cKRjknct^LZ zkXf0(jRsf{T%H#-T96oHw1$kwibFX$c(t?yioo1t_8~Q=BB=LOoYA|&s zghJ3MYOWyW%CxjhPnc_&+RP!0HTB2Ep?uPe8>Qea@oCZ~Czr4>qmPPxz|$uG+`%o; zg=boBsC@2bNHK7ed`iN~1y)?MS%KzDr>szk>xhd@;0Xa=xce5p`hB#(9c9Uio8tAQ zsdVPg?WkLP#@A^!-U}BgW*>FpS2%H#SAItSq#lEZ>a;U?|4quInYr%wW))K=XTLf9 zYWjP2dP~TQ;6vjnN?F?C8wAsu<9^+e@N##4g&4pBdj}BqJu@IG6d1(ix@&=psTBVHzxO(+P8V42Wci$^63d-YgYP7=C04YZ4XD#-6e~L{sB4w6c z3vF73zoZM$f75Tp!#Sj{ow7=G^d(F7QB_ah&uj#M#+o#}&-B=AJOwjMq3aJHXK5&h z*JwAK#e=JubbgZXo2Kk%GpdV?V1q3$;4rO@V(dlJ$JIOBL3(Vz=ahF;)r(iu1eOWS zb&;M7;*`Qa`Wu-bW}zr>lM8q~Q5S1&dwlkvoR~y;zY@>%lv!&sg?9Mk*G9JdQrUZ% zLD>C;&XSuphVYujnT?WKTxK&HBor%Bp~??o8$a-QQq%o=qN49eY+kDA`Zw4}od&IK zbe(z~?(a7s^DN4162L}fUB$jS+~py=G~O+;{>6<+rFz?*;EM62wxD|Oy0KPf1cK<;tx#^$XkUQIYy=pp7f+`bg|ysdCpf9tMt5cI07&bYJPQ zX>CQY5rlbp5R~tl#2wZ<8sy94KTVF|eNIx}ADuq@%t6KtFZJC!)?|3rxO0W$kI|C)X0SH-1m=+?;1+XvNMzd+(V^UR)Fx-5jZOKW;SZVMI+VkM4-e#PvIi1^$>9vbLnM8wuj1wi4sjm~W^wl@ zz`v~8P7g4YDfcvFY41Bk9_w?eeQaix)xO0F9VR&VHA^L$m0FvXI3}mD1t!GWSZY#5 zfJ>ghYZ9XFZk^|IQd8cB~zup758Sk;_Zsa!l+%Unt zP_Ebp94j~Lt8-WVIc=kB$z)X0-mQLG*BFt_wsYlR`b*+u3~(vHZi4Nb3yPbK&2}@)_=e>P@Qjx-HSjLQCG&Qea2eC7Hb0*f zxp~3RBJyTsxHF5UAD^qb1Ncmihp+_%Fs+8KvZIe$Bg|&@d8n@`uDojZ#3>8A?YHDv z-ynHz-mia6e|JH&cdt>pzvatcF+)+o$nasyp>4bTq+Tw&W=n^bWNlF2giaylOd`E{ z>adnup0A*US6JJ#NTF|#o^ouHG&j6SPKArs?-B;XV2MEQf)VG|uJw6f(8mt~uPPsB z&L;lw)m{=*M?G>obzE@IWKlD%0#)frz4r;hS#1k4Kd%{3ZV9sG?;oQ{X?Xkuk>-Dj z>NLQ+*#h@mrbvUIAgjZl6f6{>mVf)*nL#aG-R9tR5 z4iAVM+J5%FcN-qu;lk}OSO?h1A2$%N<3_Ec>KZP$L1-tBd<9w;kHhCj$Dpk&jZN+A z()WMXjSQp)`HaUe8TF}S;+8fYmC?4dTwdWYweV{uA$tknCp6#<-E*Jmxcg^}xAXH4 z$TjLU<9h;0B?qc^d zdRl}K!OTIK$;|(>1VttdDfQiNN3=0&M!^BNRB{r}eKLIeGN~E-3nB%K`hw-L_8oRT zwEWKQo7>4AJ4~-kPQpU#`oLVXXL@U#8!diEz}1#!=dp0f7B2<8gGIl|bBsqa#-)6* z?{d&4$pPJ+q4eOdH_Z$oIs zpi=i*3s+iI$BAblwf~AG{-kd#VUEi>Lji<{?P=*9Md)vNPf>(NI2f?=Jxvmsf4&oq zb3Ne4$3J3?m*NqAbVfc-gX_3MScOu=-*aH9Ss(^X4b6~xp1(|yjvui{J}0})@lR3> zYrh`JFddVRNiYQkKvD+%ZBJ#9`^yxH$WZvV8;=wR6rM9cgMxczCg$mW#B5!I0{QPh zI*MaVJB_;jv)SL*{_E(3k=GjYo;2t`&6sfkfBEt7|Ls5WKaPPs5U-43#JJ!8X}OgO z*v_!~cPssW)wiidoJeRz;QrHWOAZxb)BF!(E`A|QXEr=U2LzruBq=1gHnZv6Whaf; zJXLV@Hm*S6M}P7YIE=lkb@!wCO^s=_d_^47^YZ9eisuc=6H z5N7tHq|GEk479z|h`LF67LI-gm{Ce27YS4OI|u6qfgcpz2k-;4!+eE@{%GX282;7o zE60y3fm)RxHR%3c0UEEo9g)5wOAnepe8b{oe{(R1ipA4$^Dy%tHh04!L|x8VL^Rf} z!)l8E!`@p)#T6`VqY01@AV`4V5=aOT+;z|pf@^ShAKWFlLkK~FySuvvhr!(k8QfuT zzRAgvB=2|Ey+6Kt*ZpzU8fNW1!|dL>yZWhms;jD>5+Sy0%&LAol_ph`-aj{5CRKc+ z90akx{oo_UYssvUvOoFa;MzJH(of&s?n`m8o_WxiskI^UYe{*PTwvhbI?Ax=XEPY# z0#IFx+0ii2g;7!SF8%g>ajRQ-?v?Wo*(GVD%CQqSTqUbH-!+m%Cm=_HQxUX_;$F$B zeN{RU5lDFfRNHQCu~pFbqlb{NTB$`Exp~>k@YEEca!t<%NSQmONrXFsBiK3S`YUh{ z=Uj(SuDzIkctfD|6ox^4fYJ8$k?+4LKB8pivs{4Wrk)w@oiqkB)s-&Kq zyY|1zYH6Hb*J_-0#aRd?mP;Y@^jjeNHN=iThd3hsAB)9p0&rS~_^fOb8G7gR*wd4l z>FJNp9R_5z)q~GhyCq!bkGn$5&SIzZ;w~2(%_&3NcIE2lDBq8U5~pf;&&AqLOdiHu zI;L}%5>LLQ88zB6f5GgetYfTBQgdh~&{?wIyE_vQ(&*&(jCH6GWgJkPXK4X7Zn`=0 zWF@xjDlvxF%;Z7m501^;{@5sP`9*U^sgZPp$LYIi+V2Qb?z7^XSnKWs%EsBO1MWZ-x zb`pn^(ry37d?bW53vm)czqi`zHnsM6PaXRFP$~z<1mX$7fc~kN5fAwI8(<8!8Mv=9@P9LvQgv$HURmo?`S`V zz>iF}-i}AywsO_6F)CR-LUYpJq)V;yl{gRkWe{7gxGU=$8b)KF%4=*$C$(n}I*iws(!n|m0Oy5>O>Dc%EDX;lE z9rY0Mn%J1fsNsC8`yy@%p;-)l`#_x8T#)b7b!Km&)^iTYaR-Mx4p~P2amkx|SkCqe z-@915o+?cvuak})SKe|fd}LdeKQ);PcJzYyNbi|GdPj3IN~oCn{iDqA`}T^VlXir>#kV*{q%0D-*ZOhX;-uE{ zW&`PiC4=O9#WbCxmN8>)8Tja*tc2A>h?QO%khj!8M5tN?;$Y#`KBD-~OCplz49=MA zGuBnb0vk#0NP-vuQe#=Vip9!=l4LlLVUAS7tBt(U*qcdnK9nZr&4gc}k~;R!tOw7x ztc?qP!o(|tiZVu?S8Ui~MuPPwGl3Y~q){QMlr71jYnDSMra)0Q^*|N+NO5fPksmG% 
zKl(LPyq^e;)|clYBl);X7;dN0%w{ou^un?&xpG0W6kg_1zm@Yy37&E z)wm{_j0h>xYle-5*cMSOMuu3jQ^(+B6%ECI!U&lh*pRoIf}{P|2#N(eFFC4=fll+$0;2Qw`c`XDlU^Pn)38J#uDXTlCPZ z$+mds)6{c5caQ$~y$_~{sxQgg;4QmJ)l0)Pl7^@+DWtA|M*%4F9%++?`HvcHR)YctxSX(b-$j3th|t@x!&h7NE)Ecd!J(eu4Kmk(Wb#D2|huS z*ayYjbbCSW3X}t^zNOET%&)6@WILH7)U?LT)g$_y!AIbpW*sJEk@p-`LbOvk4m=)$ zFDe=mMqUY7t9q7o@yN?esEpSHMzV!H20^upu874K65hk;kS?WukiLN_L!`%Yit+YZ z=nYMlky&keY1!Cvv6qgA>!^zfVgbRmYQJaNeXZr7Z;=_xz4}GXpLUUqu-CI)? zN_X|#o4ym01moSNYSd=Fg>BQ@gEQ7DtHRje{lQuErWC6xz&trXY(f5%`Sb=PzQ;RZ zY8a*dOWVA712d4Gbr{M2!$5kKR@!Hpmp3Rl8N&RqT?IB{y(}Jo*EarRi5C136vw8d zcP?KoRWKlQe!*P@%om!(B|JWZ_Ltiz6W0{wb%exnkB9ncD%39SHPC5x)J`kwJNOUw z(t)V=-NNIDR zu#5Y9n`ClHp@z1HBDhpwm4oDi>eO=A=tUON%%$aBx4alxyqL<<57I&pq2&>%2T}6| zw9sh4uo?vAHb#H5&vTJ>+&Tx>87B*}9Jo_~Hlal8U%KM-i~ zOmoJ;n%L(CF1{sWDpzG}G68LScBN)xfW&L-UiOrTjkM)`#tnY&aDO5e{D5{w?dWM? zz6hxDJ8tf7J68lhK;50$ylxbQA^;LVhwTJ|x~GD#=;#*evIXA@lnNrem5eVFlfM1n z8MgtA02bb)w@R?{PkEZ)(KK1TzFVr+9QWAGy)X7Kr^yX-wl9QC_*?Bmo};HfjM@^5 z`}O>z>&Ou#2fi}V)U^=xdUp>WjDl3~}eTvKXMtRAq zYGCj%a^Q^-$_B2qig{4pITwO1+XS!ZP++Oa=-u=6KsThAkI-E$ba74ZBx5Bm7FN^>GPd5Kxv@joKG%VgI=@EX?Z;$jZ z?n+fM`}Fr9=YJl*l7`U_8#n{&f1uN!;m9!NO!!v|KTrSPV$Odx^?&y;0q=Fd@7a^d zy!BuGcJJOyGM^6{^jku@4W8zI={BeiyOYe$%7VQ!2mX}lf4>+J;1qHs@r9_Kiv46@ zeM@iwDxMxIN1;m?t4w(R(1pK(qTAvr(hlob--OTZ;E&^H){%@)vedA|B_xP@18NF` z>VL`Uuban1c>HqeIzmDePMD0*g~}T*0O8wbK6j(Rc<=jrPmMb3$A?X)Xzl-r>HbOU z>t3fFR?o3SwUoj!f(D5=6;iLR^ zm%r{+023BjFAeTD9{rzQ4NPH}8T8*@{c~#kw>N$Vga7u%|B2qvvDM4ar4nd>3NYASKR`U12)_SPnI1@%F75L42Nr?PKXmiIaqOpw6AN=?MsCdEBf8l6_pNm! zI<8s7pN>lVhf*eZY5k8j%HP8l_Gu@(W4^|vh#pnLQs3d{>S_u>WM zxxc`8KsBeW6%`qNz~6eMoAj}?3H$wyMbi4^8LQL~te|&csrPoA4%gq-ixDAB*KA}@ z^7AX1+zyBazklbJ-U*p~^QT^dEyZ;?G$G(GiM|r90d%)`m72mkGN^_P(-SoG@{XQJ z6&1T8P{MZdY&(&1xRmePXOCKpg!fYZj9NoI7zcbnnxap^$L_4hZ3)g824Hf=9luII zcgJk!0)!SnA?>niEv>(V3g1{anmK%@_E>(MSndLS@i6(A$4)}7)`1Qg@}!5mja23j z6o;igy8TRLJt(v;!cG%qX2x||;+m!`A94qrv?(Rw$>WH;0n)Wok|UdkS0OfXxT8Ip zM8w$gPVq?$Ieac&p1-X!QcjY@d7^k!pJHrHixKRyzZYs!anoOiSoujM)vaB{hqj<> z%9@f;?mU>>9qK<4v>#>ryYT{35G&%aj2;h)32;1hr_|+`a5WWLzs&0?PnHk|j$Yu( zN-qb0Flytcviam7rxzf*{akx(+V#|#>!OFfCXwuWkHu|MceGp|P*P`9u8`%b=uv;L z&I|XQj}jw+J|e00SY{7av$yqW)y?5~EUdsT71mdFZPR?ps6CT?J|{i!zv4ADQoleeN;#;XPmTHzwGRN^?Kv<%(sR zDxHxr*15~%7utP1q)tb&I32Qi2o!pybbdpm_B6h0-^Ti{p3-T8X-bkqHqP#o!g z^+Ykd!pY1w`WgqxGJMCHI&Gpb*NvM7G`4OZdV1*V+Q`Ot8*3erlDe%@+wrFr4y{E? 
zl|A3P%hi5H00cO>kPT`KcJB*nw#w+8i%OI^EI-~>^29l@Y_|^OQw{Zql!uLY9w z;rC=&wNTan@=^cWGQ;ac;8efFD@n3RuIBQexnCi7-!T`x?J`*?dm({L+O>brqU&g| zT{wO*4v0*1HAml^9lnCP33cDrjeTkB@y?WjK$r8@QA5 z3Qn$gd~ju;Yj4YCQ~59rYP!FU&AaF4=!_bRi~-$2sVLP(hfB09nKz(8O~tUTE-!De zNUf+U{UAWz*-xWrz*S@~zC=!w2(?`!)uZvftoHrEdHVtRG)dwa_LJyj-Dn)1pl8gh)xors~Hix>S z-Znbpl)Ed0@aMZgC(;jQDCMeo^1SL`g-u>;UbA#vkXu|srKsvPbA>YhLf$%g7`eEF zB@OClXxJV3dwDTZQ34tZkK>;y6CWfwJKvjA&59>~z zjo5l@Om4@&liS%(eXqG)D?7PGxROMsnMZo;G=-L^+$azbR)0Sl$w#BNKrlpv`Tj?_ zp~68N{f6z9Q-19twSEQZv3hePuM@)XIWMF&Mb4c^exui+F=F^4+3SFd=*4cel)C;E zp!TM7#z`JR*YUOby0E?8F4uZz2B~Rg%iIKg?a!r?th#+I6!%SYYpZ;JTCZ)F zlrT5^{jT+3T$;5d9S$3x4zf~ecSO=QHbR4AXlaUm02_B#NrI)fOp`SNAeJDmItRDU z*|b&Sd!YR2(C}4&43yRCear*7tcD3lGM4$q@x^Lvlhu!%Np>0A4bZ%z&yB_dHIZy( zjfCxXjGy6b=~ofEFA3a;9DNr#BB3G{r9Cu=B<;EEdo@CW*THM{518fO5T$J*|5SP_ z{baD&xiPUZLnpRF&Rdm_O*)UUsAq{SfN=bE>^ZNoYJ(x@pg26Wy;H)9K(^%`OJ)@- zbsmp1_4pMU@nzN!H*IuejQI4?+r-}Uo0QNFLXWjH1*1&GiE6xJ#0DOiYg7sYni^}h zL}+QM$Awc{!bZE|-PWKugSkKr=Em5l|5yVRVEh?Z|GNSTQ->q616UPWxf{Ai6iM9a zj9@B7oq9oxTe0nr>xWW>&RbsJe{VYn&nlzooKx?p1fw*N7Y~%ht!t&k&!5q4qy(##6il#7$JBYQxvOc>moiPOGY4BMZmGkdr z$WKe;0BOV)0Mmg@*soqnkSzI?dRX}C2Pka5)K@tEU~%LCnR+CZNctt!WI}}8GLi_N zF-Nt>h#$`ius1FgcNkXgccGluBAg7lXjg4K9{8C9AdnW--L4n*rh zk^8s`o8MMO8T>i45d$Rz#r9>&v=D^?>!Mfv*g}qz553qw(jxj~>&ewWZ$h(~ayH+s zVmp}QX0Hk5L%Fdw-<<0PuAFPF~ZueMD zhWm#FO7aEFlIcexZrY3**JiMuoe?=5y)o)O|E{;q=XYpvEq`gX7?Aq>8($sqP;qr# z-*OiZtF6vb#@LtaR*SV_y2BQm>8W%pi2+=ZGWOy2Irt&xv-AzJ<2m7hdyVoo{_EF7 zdH6G$fFG3Tf04g#E^j2J_2q+}IrK#}a?Rm+FIT>^8SdQqkF8=Y<_)Ne(VRB&dvktc=Vy13`X$1_FjpSt$7JdOjXvOBJ+{fx z83%CBFoNr^0ugv*|M43AH4H(%FoTik2=CjUQ~4i^`rD%}w)w|@%%xxR>W3JNFH3)Z zF8!O+;9uentKI?s$20!=0^|Qt^^V`C?H7S0n1rk$^sfAGE+=3!#Qv`qfB)JB7Sj-c z^~(2WP{7|W*u(t&(a(SS@58{dOaCwGsz|+%eFJN&L~9GlI&os+|&gPVtd)g zEpZ3(w@kuzL2{D+!p-vXv3AQq9B4J>0;`VK6q8>2DYLCrdy=Vl8ZenVB7#KB@T*4B zH*Z2*I=eJ;S zH1@LQI;f`@rNEDWGyIqA!72tgqL6B}C|m5sN#7|8sK_LHKZm}_ECRM(6D4SIQ{F(V z4=gJ$OY*G-OziFL~W`QmKo=EdmBMsBhaSi>eXiEx7-Jewxp) zs7+-7iii&{xKj=pqm$3HXtj^|e}vrPpt+yUb5fIEo(0ptGRD+m862sUZEC{C*Q#f< zoNGH+iDLG;dZWc{hvenDXpqF^DA|xapJcccs{o1M56yu@tVROePz#}1VhT)d?a5SH ztoY%}Hw;OQyhvQ6T0UK>bI@yAw7okfEv!-gT~Oo{{{(cj!z$2tF1T1ZC&3zk=|p}f3h=sCwW zjTb!prF=B!JpggEHX!lM=FI&#rMu518!O*2Ld*F+a7{fD5YS9l8lR$f`7m=@8d563 zc_Hu2<+?plIdfNuJDS;{)-4b!sNQYf8m_E;ix0afe)N*3`pJ5GIj%Hbz9#o%L^N=# zzSd@lO~Ka|qnQ_X)X~9 z#dXod9~hk@&E{8PcE}L{@P$7d66(7h5uA>7;@E6)!jx`#$G)np-Ojab>(PN%yWg?! 
zq|&-V^GWS?6SHmtcbpsj!JPq8&MNi=U@C}&%jxRm`9gVH_&D>{{Eb=TOcBthhq7Wd zA#}cr+{$Tmzz%SK={4&He^gMwatM{d!fbmVBB+-4@jc-?@zumCwGq$md9*FwG)JFQ zEPVBS$vzgGZ4R(q@mPIZu?dfIyt>34$#;B_?+M56lq6Eh*DKj1pM2nUA$igM_7mX` zF>Iud(U_Q+^@oj6muLsa3%;AABm~S;?XqFDUCl$-Uu!d{&jqUHmMUB-fRit0zm)a9 zT4Q#Tp2Su{@6hew{os}Dcjc*>xuDG&Ki>4k$us1XjpSq-&Gydd4I8 zKXpRe5#^UX-RBR-)(E-Z1$Gl#yJNMRM=LFFn(9%QjP>Z%=8eE$5ROY3)IXlXyg-El zO7Kw0xl&<$W94f>*lO}2D_g;MNZ_=wadh^M-puPN{kjS)DAP?lJLj zNI9~h8Mx}7%bqEsl)cF40HR%QN=Fyt@e9a-j?V4H;cD`V5*;65u%%e))dqkZRkNih zq=R_tZ^kx^n$B!)E4sZttAf_`>^WMzbNup2Th5=N;l%3fCsLjJPJ0^lDn*#U?^V;EKCnKtUB`gHR}xc3|9R96NZ z5v2SPYsAGfcS5LUW@blwc03!R^>3>BqpP~gr!cC)yD!B}oj{x#8q7<>Z?zbbSe|fc z2{(gr9PHV!hq}kIYr#1u%-C07f0(y$j@`2#9MrJ4E+2!fcPoeB$j+TlWO*lBqZ-s} z;yFwgJ5f!f7xekIJE*;RCih9K3 zL#VR_xPvA!Dil4PTp`|CZrtc;*TYs@F5SC$B1}-TDoJ3F9ncp6++2+SE{L_pEQ&r>c_uMM|r~yrf9Do z7DNy!=si?hZA;T0MKFBdUfft1Hxx4Lj)U~CwX@oT7vWJ)u+)R$qKM4SrsBMBLm7>~ z_JuN1PP~X9MMpMXy)G1oQ=jY6KRBO$?H*l^r@L4XES6qroao}@q<6)WpM?LHrds`= zqe$?ykvkTxdo!zW0iZ}Sm?*iHC%lMBH_jP9HR(t*UbT&&KVH8+k*`1_61f_N@#Use zeSQONgGiE=C@Ij97-)I!8B%Diae{2Un^>B0um~NUuzpk#5RufU3@*hnbOYMqs;TpT zo9a%t{?cvtK^lwakK%dWf^QhjCi1GsZuz+EhxRhx7j3Vk4cu8vya0XoOPseUrGPdm%7pQYPN8wa6WJ;w znm)np1q)(xOB&@QGmkXMB8}98Fkcn^ z?@V=AGk^NE|J{rigj?cs5g!JE+EFB-SEMEJm=XxjqUlaPDO#`+cz*~`xwsZ{=kBub zEH!7HdKj`8B#O4`>?z+rzPk)W{|t;9{*i%#(Hj|RL6r^QN^RrPx3%t~fTZ1?*NfhlarI1?9Ux zN6*E~snMT5jK~C6Zu7(?d+`QGFXmd8@l2ZVjDD}+Re`>4dd;4Ueath~;(X`rF5AkH zI+tQa#Jdy@X!S(@1WajTk>E?r!`myZZ61#ix8$%s>qRPZZF#JnbZ2_IBD07B4{xpu zIrI!QPX#_X8SXfB8hqd0#y&jt`x?zh9oc6i^(5Fu7OYrsjZUO8@?P%7m7LSKZ~Nqp z&kh2tBsps_b{`^-w|L10iadn)K>US~jM=ZYCZ3#*#$@hs`PaPeIWFZNkLHq$o(kZaZsBV{rKX;yHsk!CfCzdvqxcMS)R&55tR^Em<|eaT8uuiot_ZL-pHXeoC5tbpsLn+@r*0(oRD=xFv@Mc1L2-#0VZIR%DR4SD;Wu z%-|osqP7uo0f?(fa^wr+A-BfJRXX6r)#Vgw^ytOPp&U)&h`JMLN74fyH48~OO|s=o z#W5=HZ6Hz6O(K6gkrTzu4?%aQPPa=`+vDDfhk#IK`#oXQ@Crzg3|Q|HkOA-W&kTd%5k~uX8n6w%!mQ6yA@hzvMWc=rc|S7opwWC2C&o`r~GT z?edB3O11nJU2u~^{ke+slt`Da)<4d73j@~zhl!BFp43^}lHDIB@&(EaYy=!WTmmM{ zZU`FY%P-rP9~t9*Hc1 zol}716tF+8hkR0QunMcgx46<`9={s<1ZxM|wcOJe9wr1rHH1!1Yk<;XtE%sIkebC* zdq2S8?M-8zSd~*$QH`iSdo%PhzXg<6LNvW+-d-JfUlS-d4}`8=L)?hX)l2ziZbgU7 zr}!f54Y?RqUo2ypkXsVUH2LzRnryGB6lwz5YW4cC&OgsUmC`(#=kL}@&tkm)z@z~7 z$HsT2CWPX<0R-M_d9FK9XVV=yi-zq=Do&ecwS4D~jkM0lrk1~K-i;w99+#$6q9_g~ z?x0$48dg-e%GQt!6l_Q!%7j!}TItb@}uj_hC((r)z2TwnIx&0}A=o-{_mSOQ@r*@Oik- zM=9NAl{|5ZOX-j)c!-xB>g0C$IF}>_u6iiP=XW{N=F)VS-?#ZJ~WLF9yDyq*%P?ny2dB(oyS@Ia#v>mxId4NAtXMq{9 zcSRg~tH#g})~@em=grG?k2MMzGLL5X1FWn!VOYy|3ekyI`{K)M^D&a-7GwIsnSq3c z;l~y8R{)TtU75tHN2b+oMTEM7Fyf&iCA9mr3oQTQn%9Q6EoYM&`D zv*L)7ECmK`a9OkQEceuc?%xlKUqY=C=pr04z^c*5`=z8kz*~_JrL32jlK(1(Q|2nu z8l?Kfz6h0A|2LKAO(yjjbpC}8AUsWKO0LY>Omaogq;;u;AQ})3flOg>JHxA2;7hl;;y&2JXDJW%@AB=?@@HI*JH%s@2(9uPoQ?3*#elb<+>|3jw@g zOCzY3H>QjDudRvHISx9P-6GDZ+}ASbZ#((7qU$Ep5MOtdUn(qcj#0hzuI!^^1$ z_jsW7dxU6_-?9e&Fo_@;X zu2mPr6|Qod5g#8NC>jgu7C9K!ctXej2EB*-0ObKJBXLwLaZ@w^?rlA64{tQ|Jvvs} zGvN*{%*}exsnl`uIIYlIa3`{(-&(XZmbqU_9 zJRX0bj;OBvs0rJiVb~P#P|Fli{oL>Yq%@0HOokpRL}X6Km22-lT(G&;_#hzykGWT} zTilrFru1Wey*BIc>_D633%x6WX>`xG+P-0BVy+1=X-`D@{)q0Lr!M-tbEH`U5HV%z z;Q|M?yA|dp<&TF$g;IT|XjM<5QodS_a{WhCG>pvXOw=jq+fA|e9AL(LQekwO>i2Pp7VQ#Ej9j^X*w{~ul{|KR6 z+Adml|c4c*Am;PeXJ4O6NDqo;l8GKJ3zw( z_GBFk?zkjw2ea%grY`~vj~7ck_TF&YiLMd3^gGW0rJ(OpuM~4<=HFfj?a^^kUga;^ z>T@r-SK}W#M0OjVRGl?p^#DY z*tu!z+vzIeca>FLV1F!2sJ)moVun;xrB=JxzI?s$dkPL*Djhw$!(%eorQ7nIiP=!;9jKd}4#ag%+y zI=A#XdU>OW*nXppZLqrN>bdwZcno0k9<8(OljOCQBYEq*_9c~I*QRL0^JIW8Je&g? 
zefPzf^PfPo^V91xF^sYQ+Pmsm@Ay10KHT_y?P;W??Io0T_ip|9?_&)+_LYH|1uHB@ z=x?mmkk*qwy@K+erfiNQCnx_tv%{R>7I65$xsM+IPrhs*0VB&b2&#WY(7;yDbber^ z*#Z8~{z{1LE6cxX0sMd2?tk0*k9_`X#7VcfBRUUTBd69hqgc7s(AVyu=xffRe&WhB zJJ5r!`@5TRr$boosLT}4(pe7Wa;vueP9nylf^D;;oo4ajnpC;&!@Vmj@qxKT&+6L= zbCbCZ3}ux1Ch7tG(@qjzZbCtjnmb^F{;g_^QQ zgJ;SO$7a5d0mg}VMpSCC=4&Q1%FiX4wnSUYA@{7d;nmrsYj%at9dE3(g{c-5wFA4i z&`Zb+OyjA2SsyU%$WDiM840O(8NA0iIPP4q@7Q}CQ@W~2-+Z(5YQKo5^p2$bIiZJ7 zUDZS+6R1F zRg>O}Y^^8j46U@>^V%Yf3f!`NFmBhG`Du7fS+8?|0VsN28L5BTy1#F;zFQ0M?@2B0 zcBEUrr{TR}h}4_E@APOO#USxeLw#Y-nsHJizTh}-nJ#L$fw_01iE35YggS}(Vj(-6 zLSF0TCz7#Z${k>H-o-W9p-C7%tkxLXy#8A3hKfc#5smVa zf3om3p&N7Uj2(ZT21^x%qx_p%T*XLY^RkW}at6s*MLipbAIsm!IJ(=ozFpO7@F|-! z-)V&7#|`=K&7;l`jx&U6!hIhcqUEC8lq+amRvoIWyI9mUca`N=_DyDzf$NMDI5=WU zj!jjCBX({re*v6qjhIF!I+g4j=tE-4M;ZpS=8BROWMs>Kp^V}h+6V57n$Id?o6R2A zj?Cl-zwaFoRFP3yNUbZ}o_1b67xtViUz7Vb9uQk&Rn-d13cd+Fj4|D9X^JmZD0Y zy|9ocY;)RPeX4iI9|Ni02VJW(WYl{GpPn|GMd|M(iR73!iIW)?wep=hg13tU791OR_99V=3!) z@qv1yMAoNRcDk?Tq-r>9RcN1)ed${mjIbj?&U)BtwV4IMx(?o#bbWYI066%yt&9TZ ze%5u{0hO(SkNC7JHHmg(u>-~J<+4yg7S! zybkJqkK&!zdY<)ts#~o*dtrACMO)=_zZ*yP!dD_=h4q<<^{EdQsvd|qy>-P5Jm3C zhnrdW$9atDq2d;E4Nsu4%ifmllehOX`^DS=&-RdQ!$O} z7cp>c16KJkXH@_VsCmi4@tRA-CSrXB@8Auk_;sHYp*3)L83<`E+@!Ey>z*6MzI2Oo zUj9*?YO)jwg9V3oZg)^9Fn1g+`VAw|Psb~ou_@JDJO@&oriM=_Lo;6V`Iy;9=Z6@x zJ-#<7N0)u^-*7W9UYj|eYmROSb>3S;nGZUi%>UDv6|d zX`&MKKnfa;Uh&Q3j9TREyw2;5&>L>N6x6$-X|=$v&_ki~ReY6dM>-|CXe~2#ovx|t zk*I8ag|nF--DTa2CZl z`Q(N4goP2PRbHD&-dCoSZVmRW?`w84=P)DUY#-B z)DjD6V*zO!I5Aa{b7bt|C3nhGYHfcWnh?3lReD;jxZAGg=_TwM`*q!l889Ngx!7`f zj!|lXrRgWmeN5>A8s!Y@*>_ySr#5LufLM)Mgk2s8QmvaTc&L;u56YW~tv`Uiwy93y zaX?&B4k;>+yLp!!EizNK`FMf|15suPS1g^sCM)5TdQM_TnK?>dYO-beBO<}hj%>9q z{Eo3lr@r=F4T||XxL;{FI+8}l#;WR0rG@FdYqkzbP|qd;X+F|+8|Jn6GLOwp^ymz> ze>R5uGANn$9h1jL7p22uMSV#Zv|<8$!q8IFz6z-sw@I?%0?~6Y6(pq}r*EMW3l9 z%1u|GNQv21JwZgLIqUs;mJ5+rBx}NsSb@K}j4h0+JWL%q5*0xU-*OS`<4lj2o z1Dl=Z5pPlzO1J z0bUeBb;8jFBfWP{ke<@Xu z5A~HwnGR0KflA=IC49jmA#?Hc=-3-bE_S3v18KJZoHV{#p;=jWZC{omH@iqU9dP=C zqL}#r$<-4py2;Y*jd*Qt6y@cMN9a}9i^oK@U z95Y|FOQzI$O1-Yrg-72FbWSg4UhboH7bqUW@bUb`fmvy98D^c-b zBQ$67`c$5a{fW=~o*qadlAH*}Yq{VHE6`SHL}JtP<~fAC#9iI#M{Vv7%ua$@D(vrk zW5?-tqfCS;eMS9!R>S9d2WRn<3r8KO1qV8 zH!W9)aAdE`@)obu?EM{zP@)vXZgPnQaF@d+O8%flM|EHtf`o~;gYXp)mTpD2Pxuw# zny>zb(^GYA**KY{9&a%mm`1(o>v=f^tEZ~slb=d}%N3hnX_R)gxVW|FiCf)zJJO2I z17tRm8p4G%)Xvp~;;bG>57H859B$HX7hwk+HJ&jSnW?on(YU zf?ku5s4d^J)!go&9->E>C2=^BZAuPyv3so&B=2A2*|jV$>-}s3uPQn<{+Bn`4QDg4pf82+R`L9WaSon`O(&L8Y&A_Ir4&%oriw zydu*VC0!{(idUoZfay_4cguUxSx75-=5Ab#=}q5hOIg)(yK+JLxDSX>4?+$v@k)1K zSGb(Az@(U9FXY4hH=dQ;Y`AC!m#!D zT{2g_fM!*s;mSrAbgiS-L>FOhM;#6=li|)WLZc7Q%herK;?g*Q90c5rU40j%u}aqI zMH!oWQU!L-rM%hhacn7p$0BPsMP1WS?5^Ne>pgnbAep-l`wUn#W%@STNir1G`rmy^ zc>{5qCLN>Ju~+;3r}cAcJ3NT4f+_Qexo6-8<&F0_39vv7lQQ_wMA3ewar(Wjky6hq zvFoKtW6`hthTfbl6(yRDDC&i&?_)UUKJ9LpuL$$UP5M$t7}+vMySmD(ybM)#MI9_v z3CKp*@;5$Io3&e3u^rH}K~9XNkPIPEZ<*d%&@u*@^P>Adv0|tQ6;u*tMx8y!UrGdu zL_r@BDrX9}l(|7i+&WC-)#Vpha=wn$SCA|RbuHgA4xu#J zMI9cCfAWkq77DS;%vBNk=3xVwVqic{s^vYr2s43jUwI^2k%|F5hBsN4)tU6q9=&hD z&e#L_+b*7{vX;Dyp#vkzHkfG=sCO8m9OST7^fzOmFL5(CUkkbIT9{(jdoeLh-Z*vf z9$zT<-GgTDL63Eiaa?fSE;3s31Y^JAW)xY3a85(}S(y4?O@A)4&0DMKkz+2Y{7F17*Hx$mbAVO7mn#uHJ$_`s zv>WjT3Pe&6bkhfN2<2`d2r<$(z1sNdtX<>JAL*xfas~bv6ik!_bO*4&*;Xh?K=JQ#5!x0f7< zRu9FP0T<~tz)MZma7~wZ%5%#M4?J~oR`;(RTBPA=LLoHFZ<^U=IO}Vac9BoC`ObQS z4dlwAxGPZfZ^(HWY`I)QTF`Csk5XH=8h7B0N)q5>)k3Ir?^r3=ytpp=Mo z=^g1cKteH85fK3a0qIReI)q+AM|v-z2Le(O2#`o1ApMKmbI#tf&-nh``{RzoU}OZ6 zyescob3SvfS)S$YAfxW@dmG&HIfL`60gly6m#MAd6O-lcwHnhWW3i@nvXa;Lh+j9y 
zm6{tcJC~7Ix0_&8!`B+^(Wx#aYycujKqqdS&OY1&9UoI=ek8V}V zPmxc0P(R?yZS5@OzMIo!-meTD_`Kk4c%VX*{f_vgck{?h{i1O7u>_|+uV)g**{xkK^Z$1eDMWVb&+C1 zuB3kE)gmWHhU;x@R*XE(Wf>bqV|F9QDi^^k3=5UtoKi~Ccm)+>WL&AUc)Dk}_^>S( zhu@Rmu&pfY3bB#q063Q!XB*V>?mU zUheiZBS}FwhLpxEtKQWEL;)jWlH>4&*?OhZbgb}qCzRdd@+{nZEl-&XaSfl|-IpYQ zg#L(Th@2Tj-#2;4?aCxgnva+pY-et7*za!W2yjO~E}qI;;WcI^ro!vi^(HFt%XFWH zCsgc|S=#l8PrzLKSQL%kRMxHRnfui3ITN=U*11r03`vPa9%Cv5-_oUW>4wgpmCgV= z_cgg_2~p2(+)9x#{0(FF1O@g}C9uK@?f#W5I0VZ}@(zj_<8H60_8eY`&MQPH*29&X zhjvXzkl&dpJU9!LbtgD(rSJX)|M~u8zwF$ha?NX^yBSNhBc2#r~qu z>_WiC=lQuVo5)9DH^*o>eVN^BzLPD$W(XkAWpZmmc&3`?K8)tZ8ugAuzgeXfKQ_cs*R z({3AM31Ao0)0prz^OrLQaPB&+v}F5dNnxV(H--}an!IZH)OBZTLhA#>OIV@gP6Ncg z104LgXW%*}mQuG;s~=_T@M8AFvL}7WqM5$0dhzy7g^NbHL|_Ajn}JSxN$&O#BzL=T zg4wn(>%FIn9)pXE7*rb7!JEn+XJlCg`4FcPITn16#>#U>O}+Jw9%@TQZ2oC>8~?L9 zO=ts40pezVrwHBFeBFmG$!&8fu;h(s(`4{olV=Xmvrm8`S8BjdgMT@#nuTxRXJ%kP z76kDME&5qPZYP!ks^Vs4tGhU(;1 z%*R(D2uwEb?{I%N`Ytn<+@YL3G$I!-U@PtpQ{ChFnyd!W)y}>P^l#Lgj>2Oa$&*H~RDK!cWS8yN~ zghWoIIv(L`im0^vfuhH62z{iu|#6;;HNGe zDU+MQH^%x?F7(8(?{p`Lzitq4%70i9v5Gb*Hdi9mtBmXBa+X5Ulv@ijZXHyWXt+pH#_puWZ#n>s~8P!XevgFg_>#uyiU?RGHGesXp&{-GqjfDxLn# z=|A(49Hrf}LnoTQ-*&DoAKAMP|LW3d(xN}(ecg>vACdXyluh)v1*Q6{e&zRY#st@P zRN&|+wHz$j!GIwSobfz!Eb;|U#Niuvw#+P(!t`|ViCrBB{lQdX3&r1eKAC(eINC&2 z(1TY=WA_g-R+|eVeZ-O^#Vu4;PIbRi9Y|C{^=V2W8irR(?Zpx7y_BwAK3-2^T{}j1 zxdnsNwd?p-q$*NYuk%|+`=mL1+q`l^&~8|N*)ff=I-jI| zXVmrk0QOW;cm}sqwYJH8_WTEpOj~iynIYQH4ttRKfAXx(i(*e+?>q;YChr$hTvT@I zkQq!YU;)uumAK`X7Z~oKXnjdzp7e(0NTZ z)$M4a7;~9j?V&vn56=fJE5xKkxkdQ?m*LC8@fdR2HQDzhP51X90qW=TZSCX^?u@Ao zz1KV4qjP;BXAa%>kxqkuY{y5yGB(s1iL$VU^eW;BA?q)KV=!ID6}xM^hJjj@ee|~J z%yaXLQ_OShZ!wXLMJ%SM8jYI-oUYgnax|Kf!-G2J+y)y`1m|4ob`Cj3%QJi9w`!c9 z>D#KyHz-hFM-Z03<+%S{Q`Pb6XT9@mF~?tt!T%(O?~#*c$^Y#D^Y^UeSM*L=xc!#3 z{&oG2&|cC1*3YO%F8@wpmDXn7J)MYK8F*1;e)*E4oX@oJLzr^S@}s2?MU7=zih)Y$82?mWVi zVZE1L!XsaeRvaDkTX9{)R3bTB@=5)=SmKwImG9CQ2Z%o|kZV$9%*Ry-&0fXu_DqNa zIRyKRd@Lr3dC>O@aMBEPF;^Jevcg?|JIKrH`R{dOPfTB5ne8H-7youjcuGFM-jgC% z{dI_zz4}lIzR?yGZp8fsItLkcwsfwzn7IN_oN^wc8kLt}H7M}moVI-%y2KVybm@t^HN z`3e_|xs0JwxH!j0E{f6l{Bd)U%G>FhoF6Y0XbFygR#0krb$vS>YE3CLUHdaY3C!6W z(yUQnw4ePIj`U8#{0iTt>T9gEeK(1?aplyrW)rv3T);EM^rC~=A>X75Ddr7{cKz%AUx)a6TF>g0 zs$?D2e%aaxonKwAp8W>_*vmda_D2op7qF@yJkdmm>OnmZ;bBi=)Qko`EAu&6!9}%~ z95T2^7w0bjJXONkivg}%JA_O#DZZr0TzjD(U4C^L-Mw0Aa&4R^6}vXBjTM6*G0D&b zJFHPuv`hSs1C8x*WPFGqAOKU5u!vUpS9GRje3bLYB#&kJ^@Z6SR$15?Pjkp!T5hAr z@h$F&W@2M0CFve#7ic~<>`r`ywfj_rSw9V4I#hEaicr%UY}KMjwNTJ#Z{lEgEouL( z1@Er0(_S^)x1ChTIdA-=Y^F*;*(=}gw_+x8fy>?4KhkI=%~XBdPx0~?T-XWZ%&!Gg z+O06CS2_Y{+3}Tq)cF7dRgEqVi%^e?9CS0GDI{f^+?n7JBEzuONN+_xSFT|8bUB@} zGKr|UzEV_wMntWYhWH0D)p@p4dAtJZG^Ej1Ui88^&CF&KutEA>~>F+x(YDxnhd4ix&r>7@V-PYPCmSmI*jE1@>Oq~xB)71)~Cel zm)wZqv{x(6|4NuN&>d~}ULJK}U+DiD(4Q}7t4JBki=Z0g*5k?R+E^dxx6IHRw&JR0 zzCKs%RJ9^x=rMg&VnBkqn>OSlnCvC%%iU#$>TuBzD(N}n3h{dyhGs}MM-xV~a@+N& z8a1hglTb^Nc%j<6kY%ZHU;Ms^*Zvi9Ku9+lncCP8fKnwMYlfLMS0;OA>Cm5bV!aAF z2(N63c;SRLb&$&EpN?5&i9ImJe$w?-@v&+=&Q@X@O5?3HG=ab zYoP$Ro^b%kcyhF6P5g6Sc(}H!JLZ#G3R?Pmmj$VYuNYX9UVdulwx_?H!bG^XOPi&t zr*8`_QS*RZkR`#th<%)GZR1Z{MilSH1n?s~0>#HeNMUy56D7xWSioRH`0euM^YVuRk^i7t|u! z5WE)H5pzi${k-pL-6lDqi5xIr6Lij{o1+$*mN-^KO!= zGd66o-!iP3zP5!$m>&?IBBHC$Z^RX}rd+@1nR?oH_uX^V-|m8J2A!=a#<%dA+EqVY z-|9040e@BEy)tOh>aXyDl4&Y`FF*TPf%fgODT~8JovdSx0{=qMTNlZQRQj_0Ph#z3 z#`YsQ6b5Dbj}8CpiF7Ra6>Ez3BY$t5_%|%~31a^nqkx41+0TjnL+_u5(ZBpT4Aoy( z|IerY?+pVf&F{zE{UEKOfjkco4O0HwvhiI#uav-V$#N;$oKc!QSLZHcp4f#j3*B9I zfaV`tBbU=v+!EoJyZ_6y+
zZ=qAzg3|B@|^rd$d{czU;N)#IL*kp^Y9ZbX8*-I zdV5zIEB3#^{_TB|X-5Cwn#LQcr&Xv_OKpGEfj087z86cV;G(2Zgo|!T!9H3txW&1n+ockMO#KF}1?ZTOhl2H`D~HAyN@B+tZi) zM&I-ZItnSUm-_A&uBC+@)~fE!u-pIg&Myg({k8z*__d#=ukqH^rKpOUO}VcM!8HqM zM^d*pLx((pn@O5gNa3`E0dBlr;M520e9-P0-3XoV$>sLPk|J3&>O zLJd?0Ty|9Q6SO>1Rr7ML$_*!^bayjY0?`O}3Y7}>(Q*(STQY%V0Et&lTf&^ToQip3%53-8z~$P1|wd|aTeKB=Yvx7+apHn-`Z>YGQ3z!+bZzQ=~9zi zJ|VD@T%YE(%6UbN>hE7wec$R#LI&N+dKtPk2aP<<@5_hk@1c0-KY{XGhM{a1okxZU zvy;n+@!ja&xsJX0U8T-zj5f~gQ?Iua(%1E;QFpkcm$l+P+O~7r#0p)=Gzlbd)zJoC zsc<;xOt)FfNeb6NqpB>r|r6AIaPTm*Hga+ z=86KG*t6IMjeP1ij~QMINmk^^E82PXCQhSgF!jOf62^M9B@Dc{?|zwa1WS_PqGU;0@mO(@||J4NHJEyVXA6S%9nUoVuvW+_=VGDe$l*mz8JJlUiIK!w*uW4^W~6F zZ2N{g(EH%}vTu|meQIsHiV*f{dtDsQH~&=g ztrYtEwMuho-XvE#L|Vg?wC}cH2+Q|x%3ola!K*^`Mp=z#^F;$_wtQ4 zX%mo!bBR04mP*FCuU9-fqnXkMT(a47Zet(o9mF9t@=9j`4s^pN2RPP~Y;rR^gwng1 z)Um3HqfWHw3p~~(Y_q(1C69vlX`GJH-Ac-;Pj#YZk`?n)6z1p-DgdV?^Gs7r zPXf7X@ielZ6jx;Jy{2L)xN|Et&zCZ-u!_NiSfMMM%xNp&UcVH`y~8XCqLJ_P;(+*y zy9+4%>38u-oM-X5PS3=KeEO%`tk}?SmGE#?#>gmT3RQcG_XGMB zVfsZd!JCa<@;-X0CkJ1z$6Zu$_32zLjME?Fb`gG-GlI}P7U5ffI!s(`)n*@o2J!je zTwM&cj=Zm_h;WE|s(#jkAnabAQTEy%tPkj2VQv$Tww_DmR9r9bEEwOv2#6Gs^>^7R zFgSIU9c;S2Py&Ik+rUVpcF=Ukb%Q5mnZ`P=vo_8WdmoH5Scp4y^QlzFQSOE?-I;0JgI!09|?=yQ_@0tduo_)wYm5Z)_T~t4# zFb5FaZV6R+E!%I5R~$3+$1(x$GaTSDTK}@(QV^L1aDWALhju&<~;YCV|2E* zCJcDQfmf+V#eZyxe|^Hk*dg~i8?i8=C~C4F+l-gFxlI!bTL=pncQ04u29M|pKzPY z{<3({4}YWXKvZ-gtlc8A_Iv5k_st0W-c9$8&)<6{($+WmahwRt3ApdIg0(df&uj`_ zshztL?A@>o6VB$PV(gsZATZ?q+-0NS?tZ7%aGkC0lePIxUAa4lX$O~-ag1F)dAxYS z*Vh}Zr|hLTc&-$EUomNq$-vI10O#I>GGh??O)jK&qNZxPLWTDxWBGhy0?V0gOwNFSMc~_=`$3N>a|8bIJ@_*a()j-F z<=R#8g-c%^{qo@kPjAXh(K;PWqY#IqCa_yN6JHZvEFpr{r|n2C3kLPok7*XsL;ZwC z?NdXv7MKE|QP>zFSJG-UYO2VNokKA1nlozLGN-F6*gG6XLfc2gF;+{N z^9x$?Y1!ID4gSOP1`-!nWna{F1YQdELLGhF&Vsk4GC!XgvtX!CIR%o+Nx3Xe`W$TN z{kEi6fPJ0+z+Ta%^&Iv5a?t49@@t28lKGC2@P&%K`Aav0qr89`-dga$JUSb%gn%z2t6sibu=|x*(6ij8EzZ7 ze{C$ppzZThrcC$d6k)RvLu*}6Ie-w@_nOIW)7ag<7<5E|Ta3b;h28XlSZwy zP5u>NR=(c9Vn)qDFUd47l<@G3fZ~Fef03-*WmnI1;Vba#EKBVYbQEhEcjXav!wo+j zH2)d6_P_FEmc`=H4fFOb-}UX=vYkj7*CD5e-(+=DvW3zYw0Y~QjCOkf;zEO2`$EL+ z(LXAe2taHX`1{~_st28Iuj`gc*9v1C6l;Cs+e|0oVjZg?UXQoAzsDZJ^G`hLbms`!cTGesV;FXt7wG?qw}_i_%(u+R1x-PRN$QVarYe?sUDg1I?*|#8Zvy&bqO;&AzG85FZtL%MyD^GFN(Uy=`0fvM|W&8fzqzc;#8-FD;P? zdkN=gKcmw=)TFBuESUWtrklG}`URd9{Jk;u{!M>+<#$u0z3e$5i*4U$6ZfBBi7q$& z@}~S|wC*)10MFB%P@%iqH;9x={OPe@I(O3WXH9yr(67D7yC)~j_as~#WzBw!Q&l@Z zOKT6Ht|^9uQ`Kn52mIX4-{L%3eW&MGeB{@k@6Y_qU^JR*{g*wu8hJ}9O8xK1$luzu zZz+;rDLvhR`*oAQKVlQ1e^N~z`&u^tGEa%qQ6N%u<+8-pckLQ(sMvn$=Uta7E?9`&Ylu4&eI}8}! 
zPjH1(z7m&?4)mvpMz|&XGwk}~#g+3Lr_%T3qtmXO3|KYsKx-7%W9g35$b0?9LxET2T7(qnR0XU>gEAg+9Mi;Ty>I`r0MgI$tx!W zpUPVYs8$;3^cUnUH>b;oRXa7lEx!E^ljC1T!Lna${_OTro*n~S%DSXzyntC7ZK79w zkmcYD2goGTXnV$GPY4WFK3jp2v76gc6(wFgc@exrp zVEl{*Sya=ft@CkEUNi(K2DQSKke=BaVB0V#BZ8$4GU7r#GFaOR*0)5c=Do-?(c8;+AxpXCJf68zT~n@c9Ru6zIO)W1E+i%17$10eW>=w;l1YpvhmRXL6Mkq z-Ab6Q0DeyBV(jRl+nuEYB-_Q%jPZ;5R#X+4KiLU>`!%hiarR+QtSQb+4_5-%kBnU& zP3`0s0`%G!)wY585S*p1YQieu#Zy8u1ah_`AGFEfBbRtn}X~k&6eT{G4+i%SE~<`tR>^pat?4^EPirBIx5e$z%{`n|ciM5Fu^aY2lx?kMI+*+41Ao7_m#6#qPHdPAUs5!1;O}h*xb(Se!ZexThDdrt25*JJ z-dGQkVM1p>i4u_~Zrn3LY^<2lD*LikKBquDxb#uNa#C$WvFP+0|@91VWhRy)gJ ze)jgAV>Ksa*PVE}$M2a8_gWj~q8{TSOO^rHGskR+B`!f7;ORe;jT2@O|0CMZo}vrq|xW~B-?luB`oIvj#ECc@J*eM?@xCp z&w`+1(28!2EoY^OR>JZoyAVS;{EToxNl`6%Ia)`{Ly^qTrfhtE1XI{HbCbrw4#m6oj`;#sU&OR`jJfDz(7M zH9~giSGeDJLw+h=qv@5mL^(2dR#vg;E{DKTN0{=M#dA-#(s9>mk=&kg8IoX?i|*ve zI){kc*RIbN+QNA+ZYA3TERsuYZhhS>>zOPnqwK;ZOqp?4x@xZCmM9(uO(+`}#n|$- zj}z?*nR2rpnMWrWno~AabCmKc*ECH+Esy8?$=`ykFjo|lj?#B#KW&}1%ylBa6Q;uy zE2NhzlCK(Vo8N>xhcy1QF^3bKlT;Y#=J+yiXuz%>ZZ`18h=-fT;&h3rK1wU0Q?TMA zyXsyM(KLVYd7rBqcnwiK312Lq+SP)MS1Dm|wm?dZ^PXt^aVyuD!1_@#^32BbpXgc3 zq}@+ej4mw@b#zv=kdbgj{g)gjzn(55z3SzU#=6Fq>W}uqDVIgYio-TF9#Lwx58u+$ zPf}#PIEt)2o0rc1b@FM9(78#V3+7Ui5^At{vbeVO*)X(3E3?T=Imt26>MxA_mBjNG zP;Mltb2%1IW-eFB9Na|?mI2^g(Pa+@k*T9dQ_^%TXJuLIBVbJ0==(5eL0e7u7kWI| z4mhhVf5-{<(%mD=!@2I>+u8u-y!rg%H`!&e%klY!$k;J{$8HO-gC%|5)T<<+e}bZj z#%c1(&t__6?#w=ikTRzDyX})zCUJ@I#XXuNW%{5qjshH2mf*?FW()0D{=PN@l~BI? zYtd+{8zF=>~|R5{1r< zTY!yM$m#`DvVv6_hD5)LVWmhoc3RIZe*szPg9KB)gf{#q&sZIkuFZ#zuowg^I;b9Q^*CRzhTOt5D}Jt>`E(LTSaw`Mt;zwp^y>!h<~KomZLEENf- z65;CF>^IJ>)SRNuoUTr+kWqa%nGRDeG6I}&Sj(yR(oGS zsd6hnb>snzPa8^a+=7oH(FdikBt{ogn-e;RdDo2zP4pa6!XG)qYL zIWj}Z!%===s!VPt&(Vsmf}Xi-Url&Z*MT7f?V8VZ!`gt!f%kq|0n{-pClX_*FZQQA zb0c>-kTL-o+*HxF38P4w=On>`Cb#5i*POkJa>CRGD>;>;LW; z{fW{ySw2ceoKZB$hbW`2^duRYkKLqF%T;k^rVZq@;?oQ8tX^jh&M&Lmhh>4?ikz2t^gQ!DbaLtq?{k=)}8OjjWmTxVae?M zN;767;@sa{MaRT$LFJpzTA6CL57FlN>gqqrt8c@|8u)NVfBwSXYtLs;Hmw<6x}2Lj zTHL%qwy|tvo#otS_5O`(`A?+)N&7Hdd~O?)FhzFY+(u>~ma){K%9*=~?i*xBY)dl}51I|A#Fmn(D$lx$FQ<)B4naDAL;H?2K_ zEHg3z2K}y6C45Ge#(V`Z%E{QX_A_SDu)tslc|8OWt=d*QWpjsQw3?rE`9Nc9Nox-z z<7JqjO~P5^gK>OUS*K=yvT1ZeYD|LR8nU_9o;kM=qG-?%8+WI03TLp|C0OvN`ym-_ zzM3EhDGC+t|6W>1KCgsEEsc#QKD;%z(6*cG$7q2LICLd_)C*~WBElUmMVtaV=5WPpC!CG3cQ)4<M>BmBczO836W5nMM<6CsKt%+gw^LJLk*Kq+&?v`$ zLrEda?}~@JK(nEp3g&Q6-y-&m*t7jQ36F-f=4k=Qxd%Qf*g>nX{(C1Vo=Bg(`u^o< zmRsbfv*PD1u**m{2rhgH^kDDC!IUcSu9v>+;jSJst)@yiQj-2a>zz=)9oQ3}%Uowz zXncn^jkVO=TVqACZZrzZ5_QgjN>RkEY%0=E#4`t4B_7F|ge|Mwc;V$cI+ZjYlrY-E z`g5Sa#`LB;;hy6wso%a(lXynhJc!;Nl^5GBxLTXmy6F30TjDZ zyE?L1Y~3MFOekROr})8D8!Vcl!9LVq^Q{xm8O+r#5ya%SyP&$f;lVGX{7&UCvHpyw z)2rZLclYl>{he6e1Ff0LTKeW??CS(KjXx}>UrCRq8GM%Bo4j`wS`7|EmoAThe1#x0 zdPhlwyWJ!dtEz&rDNj_b=H^7!n8*8e=6JmqP%&((jp$B80~@(IHaU6w8`B}DkjIxa z;|3E5hTM*XUx#3lQ!M)aYVi%(#|5q&XoGXlLfV5Dbh?7`OI7hx(hgpVb`=`RU+0;5=inrGmiqj%Gz{_*NvldpNH?)n&cU8tkA5c7YsN ze@}_*d^$#N9e+<{b@AX|5@c<^Uz0Q8YGq&pE_&3?Rdz*N5~PF=BL5{`|5||Wrzke@ zaw-dMIW$-ki2}?1MBlQd)#nQUaqBmG2{IAFexiU*6EWU~0=Mw2^1kJ?aAqU82X&WX zv9mjq6mPO5Y!kL5kai=Ut8XEAU7p?V=?tyKL%iau6dtrPVIwLL?b-HXAj<1ck^c9h zpP<;MJy{nQ*&koW!7k=kDlbI?Wv(}XNo7GX;_gzS>`W?H8|Lr`WB5i?9xF|qxv{Tj z4Ra$`4_9TvqX=PkAaN>A?5H(SVtsWcnzU0U^*Hq=w>4eOwgVS$x%yu!^mDvMkE^C0 zbAW0#lh)BF4J;E*AW}{Yx)3ZHoU-ks5z#Pn+l@+K0T8{d*Xwcg?C|ZP7;nqh32IQ< zm42KIM0Kz86r|krlnAsFMt$d~^!xC^w&tNa!E=3afZe3kMs(ft_?@|)gtDg_%awU+ zKba(6BiZ6FLFxc4P8%^IuKqgH4YFE!^}^ z%&3N1EwN_y6M=Mgy9x8~AA7)O8+6PtGb)lI=}?hbwcR5w;g|@gfnEaN#S5<{3x~fU zAMOrSm6&LUc!F{3&dj2Rsf1+7^_|a&{lL?nM&%oI^*G*ibx*q 
z7)+xqOdXw@UnOcW_Z|non)5xZ{Di?t4$$2#rLvtk-@f4-WcOUM*g2m{rONQQ@eT44 zu6@GCOR(3&v?n+B)wdx(K=ZN>tRrxE)+{!Ri$>Z-{?t)%>(m>5yBjy5Z1u^hs72ns zxS9I69Xz3Ixj6QiCTN}3R-7NUO7RGNQ{SCcNN3|&NxPW3T$_eK!p=r+XQ=ep9w;41 zxUN|9ysg}yHtAI%FD&N+^kMggM*kTEVKp#O4>L*Uh^FDT-nE)eO~~5%8@B#@`a}7& z=QqPe8vzVM`loH8gx8v4y?)dl6B(f(+c9MBUr~wq49^`S+i4RFS6DZO!bNaKABzb0 zq+55Df*qA-k*1Z%CzJAuxuCD4$ncVQ(tuWB$DqWQ-X9_BUe~=rxjdLAI`dXVVrac5 zJ|QQg?~z~5p*Qm`Aj@e4W!uBl=5^Q*$ZRZ`Mup9GOJ{iXk$EJ&hf^kg{HiM65cklBkABD{{J8TbrU&G~W7-F=GfzxTb zGPr(YI9BoO`EG0Mx;jmQ@^seT!bYE-wIsc#9o`Wr>M;}%Cu(Nj30)!?uJp6{9aAN5 z8h>(&2)za&FysoW^AQbuX|bi3eHTvTiD$ZNW{vp}wHyc> z)@(B}ei?&=aZ!O5JS%~`OO0kGWsd0|g9U3E5q%Mly3tPS9xqjHLHYv&{q0VKIo%$9 zLG*=5E{bILWog(*i63?v%c`;OMpG9` zF_F{F0)m@8J?KH;w0XsR&pOQ>!losiiRrktD!k)dCY!qUEh*B;CPe~Bl!dtB@C6U| z$;y!B3$8tMZU^f3PxUaD@2}w#b`z?B0wyDzVboc1PwH`UD3TKi<1;M(cbiX^%Lf0( zgfnHp6zpEM^#Urr(6a#Aud-4jhDDEd&^VFSRhV~|Md-Sa0T<`-@SL;)lrR(*z5o*> z`vNQI+!=i$3?p%nqB3(_huJTWbAJ#A5~hHVJ2SFC^M0gEI{rzxwa3wL|EM_gZpM!# zIcK*SAiY3~_i&GOn01!$4GCE*ltB5`oPq7;yW^70;BRaL+a48@!l^O~J}B+X3D(F` z>1hHvtWA91Mfz-((jO|W4|abW^vOdHSF^DbpBu5Kxp~{Pq|gGb7n62NUG_JpZ0cgH zC7i5ij?F=utZBJnA84BB>K6xP_c3!N-dz5q!46rvlI~HT>!H7yEPC`Ay}oxdbcRfb zf*y8vTgj^}|M)TWiqg_rM&;0Cgj2?#z|+4h+@JdHcv`y~LxvP5hk_0KCz|In%4DRk znc1xk@>~=}+0W$cm&P_v*IAR6)7gkFd7gIbpRX9q!*j|jfhlS0Wiy7ygjSV}k8XUV zrXmYkTM$XJ&LW?*3r861*W+2g4UbS+ zMU>JDa7xv+7#wUF_ccIn*OKy$j_&S)g+7LRzLX#`lH!H`xs2hl1L;gNu!Ut~Y!92{ zQ*7U~&nENL2G2(l8?`CeTqbF#)hOp%Yl%G3vWPxt#Bm?Q`s$CRA()k~r`7aK<%-cr z|I|ck(8AI7djq%;#A-)^IE6yhEe`rbBu`9d97mL_E92Z{AOa#$&(^11{Rqvp&D=W= z)2+2G_8xBTm)MKNru)<{E~Tb3Z5-EZKXLzbOH%fjA$Uh}vcD8OJxJIn+OMH&=j?A- ztQ(;&6G**2<%QMu$Wi!*EV`+KH|Q#j#nPIBj*^Pxs;8z;e~&8tn|+q8fHq3F2% zEWA=>{Rt=lZ9BEXX>7$r?>F}twwMBcaSEU7cWzI8dljz}cg%*QoMtM|H?G5m9EEHn z^$r(U*KsvQB-JYBl(cO<#baNI?!32aXPube$Pw$&*#^r`wc9{KiulE&Qi9YT)9x|b zA@Qownisx}_9IN)jZdzg7v>3y@Wg_qp%=>7j>*T-d4CZ1*NAGYV$l)hMU0M5vknle zc=*ld?J+ki`gFZr=Rqzud4JWuqQUL?zCpvvWCI+0?0QnsjfKWvbe