diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 00000000000000..2c8f906aba7851 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,11 @@ +version: 2 +jobs: + build: + working_directory: ~/pytorch-pretrained-BERT + docker: + - image: circleci/python:3.7 + steps: + - checkout + - run: sudo pip install --progress-bar off . + - run: sudo pip install pytest + - run: python -m pytest -sv tests/ diff --git a/Likunlin_final/Likunlin_final/__init__.py b/Likunlin_final/Likunlin_final/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/Likunlin_final/Likunlin_final/settings.py b/Likunlin_final/Likunlin_final/settings.py new file mode 100644 index 00000000000000..e83c872d517c5d --- /dev/null +++ b/Likunlin_final/Likunlin_final/settings.py @@ -0,0 +1,121 @@ +""" +Django settings for Likunlin_final project. + +Generated by 'django-admin startproject' using Django 2.2. + +For more information on this file, see +https://docs.djangoproject.com/en/2.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/2.2/ref/settings/ +""" + +import os + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = '7lu!q_nf9z&+*@3(ty!djsexs2($8@wx3^*oro@as!z0p4id&(' + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = True + +ALLOWED_HOSTS = ['192.168.53.8'] + + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'analyse_text', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'Likunlin_final.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'Likunlin_final.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/2.2/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), + } +} + + +# Password validation +# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# 
Internationalization +# https://docs.djangoproject.com/en/2.2/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/2.2/howto/static-files/ + +STATIC_URL = '/static/' diff --git a/Likunlin_final/Likunlin_final/urls.py b/Likunlin_final/Likunlin_final/urls.py new file mode 100644 index 00000000000000..86ae55cb41e380 --- /dev/null +++ b/Likunlin_final/Likunlin_final/urls.py @@ -0,0 +1,27 @@ +"""Likunlin_final URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/2.2/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" +from django.contrib import admin +from django.urls import path + +from analyse_text import views as analyse_views + + +urlpatterns = [ + path('admin/', admin.site.urls), + path('',analyse_views.home, name='home'), + path('modify/',analyse_views.modify), + path('analyse/',analyse_views.analyse), +] diff --git a/Likunlin_final/Likunlin_final/wsgi.py b/Likunlin_final/Likunlin_final/wsgi.py new file mode 100644 index 00000000000000..31220c1f430b80 --- /dev/null +++ b/Likunlin_final/Likunlin_final/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for Likunlin_final project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Likunlin_final.settings') + +application = get_wsgi_application() diff --git a/Likunlin_final/analyse_text/Untitled.ipynb b/Likunlin_final/analyse_text/Untitled.ipynb new file mode 100644 index 00000000000000..33cb6d6eb744ba --- /dev/null +++ b/Likunlin_final/analyse_text/Untitled.ipynb @@ -0,0 +1,64 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from django.shortcuts import render\n", + "# -*- coding: utf-8 -*-\n", + "from django.shortcuts import render\n", + "from django.http import HttpResponse\n", + "import json\n", + "\n", + "tokens = []\n", + "suggestions = {}\n", + "def home(request):\n", + " return render(request, 'home.html')\n", + "\n", + "\n", + "def analyse(request):\n", + " global tokens\n", + " global suggestions\n", + " text = \"\"\n", + " text = request.GET['text']\n", + " tokens = text.split()\n", + " tokens = ['[CLS]', 'it', 'was', 'monday', 'morning', ',', 'and', 'the', 'writeing', 'class', 'had', 'just', 'begun', '.', 'we', 'were', 'ti', '##ring', '.', 'everyone', 'was', 'silent', ',', 'wait', 'to', 'see', 'who', 'would', 'be', 'called', 'upon', 'to', 'read', 'his', 'and', 'her', 'paragraph', 'aloud', '.', 'some', 'of', 'us', 'were', 'confidont', 'and', 'eagerly', 'take', 'part', 'in', 'the', 'class', 'activity', ',', 'others', 'were', 'nervous', 'and', 'anxious', '.', 'i', 'had', 'done', 'myself', 'homework', 'but', 'i', 'was', 'shy', '.', 'i', 'was', 'afraid', 'that', 'to', 'speak', 'in', 'front', 'of', 'a', 'larger', 'group', 'of', 'people', '.', 'at', 'that', 'moment', ',', 'i', 'remembered', 'that', 'my', 'father', 'once', 'said', ',', '\"', 'the', 'classroom', 'is', 'a', 'place', 'for', 'learning', 'and', 'that', 'include', 'leaning', 'from', 'textbooks', ',', 'and', 'mistake', 'as', 'well', '.', '\"', 'immediate', ',', 'i', 'raised', 'my', 'hand', '.', '[SEP]']\n", + " suggestions = {8: 'writing', 43: 'confident', 23: 'waiting', 34: 'or', 45: 'would', 46: 'taking', 51: 'activities', 62: 'my', 72: '去掉 that', 105: 'to', 106: 'includes', 107: 'learning', 108: 'on', 112: 'mistakes', 117: 'immediately'}\n", + " return HttpResponse(json.dumps({\"tokens\":tokens,\"suggestions\":suggestions}))\n", + "\n", + "def modify(request):\n", + " global tokens\n", + " global suggestions\n", + " index = request.GET['index']\n", + " tokens[int(index)] = suggestions[int(index)]\n", + " print(\"检查点\")\n", + " del suggestions[int(index)]\n", + " print(suggestions)\n", + " return HttpResponse(json.dumps({\"tokens\":tokens,\"suggestions\":suggestions}))\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Likunlin_final/analyse_text/__init__.py b/Likunlin_final/analyse_text/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/Likunlin_final/analyse_text/admin.py b/Likunlin_final/analyse_text/admin.py new file mode 100644 index 00000000000000..8c38f3f3dad51e --- /dev/null +++ 
b/Likunlin_final/analyse_text/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/Likunlin_final/analyse_text/apps.py b/Likunlin_final/analyse_text/apps.py new file mode 100644 index 00000000000000..83adc60b11fbce --- /dev/null +++ b/Likunlin_final/analyse_text/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class AnalyseTextConfig(AppConfig): + name = 'analyse_text' diff --git a/Likunlin_final/analyse_text/migrations/__init__.py b/Likunlin_final/analyse_text/migrations/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/Likunlin_final/analyse_text/models.py b/Likunlin_final/analyse_text/models.py new file mode 100644 index 00000000000000..71a836239075aa --- /dev/null +++ b/Likunlin_final/analyse_text/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/Likunlin_final/analyse_text/templates/home.html b/Likunlin_final/analyse_text/templates/home.html new file mode 100644 index 00000000000000..dcc5b3bc4223da --- /dev/null +++ b/Likunlin_final/analyse_text/templates/home.html @@ -0,0 +1,307 @@ + + + + + + + + + + + + + + + + + + + + + + + + +
[home.html body omitted: the 307-line template markup was stripped during extraction and only the visible label 评价区 ("evaluation area") is recoverable.]
+ + + + + + \ No newline at end of file diff --git a/Likunlin_final/analyse_text/tests.py b/Likunlin_final/analyse_text/tests.py new file mode 100644 index 00000000000000..7ce503c2dd97ba --- /dev/null +++ b/Likunlin_final/analyse_text/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/Likunlin_final/analyse_text/views.py b/Likunlin_final/analyse_text/views.py new file mode 100644 index 00000000000000..e40c6ca8d467f3 --- /dev/null +++ b/Likunlin_final/analyse_text/views.py @@ -0,0 +1,27 @@ +from django.shortcuts import render +# -*- coding: utf-8 -*- +from django.shortcuts import render +from django.http import HttpResponse +import json +import sys +sys.path =['/home/xd/projects/pytorch-pretrained-BERT'] + sys.path +from likunlin_final import analyze_text,modify_text + +text = [] +def home(request): + return render(request, 'home.html') + + +def analyse(request): + global text + text = request.GET['text'] + text = [text] + print("xiaofang") + suggestions,tokens,avg_gap = analyze_text(text) + return HttpResponse(json.dumps({"tokens":tokens,"suggestions":suggestions,"avg_gap":avg_gap})) + +def modify(request): + global text + index = request.GET['index'] + text,new_tokens,suggestions = modify_text(int(index),text) + return HttpResponse(json.dumps({"tokens":new_tokens,"suggestions":suggestions})) diff --git a/Likunlin_final/manage.py b/Likunlin_final/manage.py new file mode 100755 index 00000000000000..30c456de702310 --- /dev/null +++ b/Likunlin_final/manage.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Likunlin_final.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000000000..1aba38f67a2211 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include LICENSE diff --git a/README.md b/README.md index eb337d8253f465..4e7d3bb1090bb4 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # PyTorch Pretrained Bert +[![CircleCI](https://circleci.com/gh/huggingface/pytorch-pretrained-BERT.svg?style=svg)](https://circleci.com/gh/huggingface/pytorch-pretrained-BERT) + This repository contains an op-for-op PyTorch reimplementation of [Google's TensorFlow repository for the BERT model](https://github.com/google-research/bert) that was released together with the paper [BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding](https://arxiv.org/abs/1810.04805) by Jacob Devlin, Ming-Wei Chang, Kenton Lee and Kristina Toutanova. This implementation is provided with [Google's pre-trained models](https://github.com/google-research/bert), examples, notebooks and a command-line interface to load any pre-trained TensorFlow checkpoint for BERT is also provided. 
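The `analyse_text` views added above expose two JSON endpoints: `/analyse/` takes a `text` GET parameter and returns `tokens`, `suggestions` and `avg_gap`, while `/modify/` takes the `index` of a flagged token and returns the updated `tokens` and `suggestions`. As a rough illustration of that request/response contract (not part of the PR; the server address and the sample sentence are assumptions, and `requests` must be installed), a client could drive the two endpoints like this:

```python
import requests

BASE = "http://127.0.0.1:8000"  # assumed local dev server, e.g. `python manage.py runserver`

# /analyse/ tokenizes the text and returns per-position correction suggestions.
resp = requests.get(f"{BASE}/analyse/", params={"text": "I was afraid that to speak in front of people."})
result = resp.json()
print(result["tokens"])       # WordPiece tokens produced by analyze_text
print(result["suggestions"])  # {token position: suggested replacement}

# /modify/ applies the suggestion at one position and returns the updated state.
if result["suggestions"]:
    first = min(result["suggestions"], key=int)  # JSON object keys arrive as strings
    updated = requests.get(f"{BASE}/modify/", params={"index": first}).json()
    print(updated["tokens"])
```

Note that the views keep the current text in a module-level global, so this flow assumes a single client at a time.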
@@ -14,12 +16,12 @@ This implementation is provided with [Google's pre-trained models](https://githu | [Doc](#doc) | Detailed documentation | | [Examples](#examples) | Detailed examples on how to fine-tune Bert | | [Notebooks](#notebooks) | Introduction on the provided Jupyter Notebooks | -| [TPU](#tup) | Notes on TPU support and pretraining scripts | +| [TPU](#tpu) | Notes on TPU support and pretraining scripts | | [Command-line interface](#Command-line-interface) | Convert a TensorFlow checkpoint in a PyTorch dump | ## Installation -This repo was tested on Python 3.5+ and PyTorch 0.4.1 +This repo was tested on Python 3.5+ and PyTorch 0.4.1/1.0.0 ### With pip @@ -46,13 +48,15 @@ python -m pytest -sv tests/ This package comprises the following classes that can be imported in Python and are detailed in the [Doc](#doc) section of this readme: -- Six PyTorch models (`torch.nn.Module`) for Bert with pre-trained weights (in the [`modeling.py`](./pytorch_pretrained_bert/modeling.py) file): - - [`BertModel`](./pytorch_pretrained_bert/modeling.py#L535) - raw BERT Transformer model (**fully pre-trained**), - - [`BertForMaskedLM`](./pytorch_pretrained_bert/modeling.py#L689) - BERT Transformer with the pre-trained masked language modeling head on top (**fully pre-trained**), - - [`BertForNextSentencePrediction`](./pytorch_pretrained_bert/modeling.py#L750) - BERT Transformer with the pre-trained next sentence prediction classifier on top (**fully pre-trained**), - - [`BertForPreTraining`](./pytorch_pretrained_bert/modeling.py#L618) - BERT Transformer with masked language modeling head and next sentence prediction classifier on top (**fully pre-trained**), - - [`BertForSequenceClassification`](./pytorch_pretrained_bert/modeling.py#L812) - BERT Transformer with a sequence classification head on top (BERT Transformer is **pre-trained**, the sequence classification head **is only initialized and has to be trained**), - - [`BertForQuestionAnswering`](./pytorch_pretrained_bert/modeling.py#L877) - BERT Transformer with a token classification head on top (BERT Transformer is **pre-trained**, the token classification head **is only initialized and has to be trained**). 
+- Eight PyTorch models (`torch.nn.Module`) for Bert with pre-trained weights (in the [`modeling.py`](./pytorch_pretrained_bert/modeling.py) file): + - [`BertModel`](./pytorch_pretrained_bert/modeling.py#L537) - raw BERT Transformer model (**fully pre-trained**), + - [`BertForMaskedLM`](./pytorch_pretrained_bert/modeling.py#L691) - BERT Transformer with the pre-trained masked language modeling head on top (**fully pre-trained**), + - [`BertForNextSentencePrediction`](./pytorch_pretrained_bert/modeling.py#L752) - BERT Transformer with the pre-trained next sentence prediction classifier on top (**fully pre-trained**), + - [`BertForPreTraining`](./pytorch_pretrained_bert/modeling.py#L620) - BERT Transformer with masked language modeling head and next sentence prediction classifier on top (**fully pre-trained**), + - [`BertForSequenceClassification`](./pytorch_pretrained_bert/modeling.py#L814) - BERT Transformer with a sequence classification head on top (BERT Transformer is **pre-trained**, the sequence classification head **is only initialized and has to be trained**), + - [`BertForMultipleChoice`](./pytorch_pretrained_bert/modeling.py#L880) - BERT Transformer with a multiple choice head on top (used for tasks like Swag) (BERT Transformer is **pre-trained**, the multiple choice classification head **is only initialized and has to be trained**), + - [`BertForTokenClassification`](./pytorch_pretrained_bert/modeling.py#L949) - BERT Transformer with a token classification head on top (BERT Transformer is **pre-trained**, the token classification head **is only initialized and has to be trained**), + - [`BertForQuestionAnswering`](./pytorch_pretrained_bert/modeling.py#L1015) - BERT Transformer with a token classification head on top (BERT Transformer is **pre-trained**, the token classification head **is only initialized and has to be trained**). - Three tokenizers (in the [`tokenization.py`](./pytorch_pretrained_bert/tokenization.py) file): - `BasicTokenizer` - basic tokenization (punctuation splitting, lower casing, etc.), @@ -63,15 +67,17 @@ This package comprises the following classes that can be imported in Python and - `BertAdam` - Bert version of Adam algorithm with weight decay fix, warmup and linear decay of the learning rate. - A configuration class (in the [`modeling.py`](./pytorch_pretrained_bert/modeling.py) file): - - `BertConfig` - Configuration class to store the configuration of a `BertModel` with utilisities to read and write from JSON configuration files. + - `BertConfig` - Configuration class to store the configuration of a `BertModel` with utilities to read and write from JSON configuration files. The repository further comprises: -- Three examples on how to use Bert (in the [`examples` folder](./examples)): +- Five examples on how to use Bert (in the [`examples` folder](./examples)): - [`extract_features.py`](./examples/extract_features.py) - Show how to extract hidden states from an instance of `BertModel`, - [`run_classifier.py`](./examples/run_classifier.py) - Show how to fine-tune an instance of `BertForSequenceClassification` on GLUE's MRPC task, - [`run_squad.py`](./examples/run_squad.py) - Show how to fine-tune an instance of `BertForQuestionAnswering` on SQuAD v1.0 task. - + - [`run_swag.py`](./examples/run_swag.py) - Show how to fine-tune an instance of `BertForMultipleChoice` on the Swag task. + - [`run_lm_finetuning.py`](./examples/run_lm_finetuning.py) - Show how to fine-tune an instance of `BertForPreTraining` on a target text corpus.
+ These examples are detailed in the [Examples](#examples) section of this readme. - Three notebooks that were used to check that the TensorFlow and PyTorch models behave identically (in the [`notebooks` folder](./notebooks)): @@ -153,7 +159,7 @@ Here is a detailed documentation of the classes in the package and how to use th | Sub-section | Description | |-|-| | [Loading Google AI's pre-trained weigths](#Loading-Google-AIs-pre-trained-weigths-and-PyTorch-dump) | How to load Google AI's pre-trained weight or a PyTorch saved instance | -| [PyTorch models](#PyTorch-models) | API of the six PyTorch model classes: `BertModel`, `BertForMaskedLM`, `BertForNextSentencePrediction`, `BertForPreTraining`, `BertForSequenceClassification` or `BertForQuestionAnswering` | +| [PyTorch models](#PyTorch-models) | API of the eight PyTorch model classes: `BertModel`, `BertForMaskedLM`, `BertForNextSentencePrediction`, `BertForPreTraining`, `BertForSequenceClassification`, `BertForMultipleChoice` or `BertForQuestionAnswering` | | [Tokenizer: `BertTokenizer`](#Tokenizer-BertTokenizer) | API of the `BertTokenizer` class| | [Optimizer: `BertAdam`](#Optimizer-BertAdam) | API of the `BertAdam` class | @@ -162,12 +168,12 @@ Here is a detailed documentation of the classes in the package and how to use th To load one of Google AI's pre-trained models or a PyTorch saved model (an instance of `BertForPreTraining` saved with `torch.save()`), the PyTorch model classes and the tokenizer can be instantiated as ```python -model = BERT_CLASS.from_pretrain(PRE_TRAINED_MODEL_NAME_OR_PATH, cache_dir=None) +model = BERT_CLASS.from_pretrained(PRE_TRAINED_MODEL_NAME_OR_PATH, cache_dir=None) ``` where -- `BERT_CLASS` is either the `BertTokenizer` class (to load the vocabulary) or one of the six PyTorch model classes (to load the pre-trained weights): `BertModel`, `BertForMaskedLM`, `BertForNextSentencePrediction`, `BertForPreTraining`, `BertForSequenceClassification` or `BertForQuestionAnswering`, and +- `BERT_CLASS` is either the `BertTokenizer` class (to load the vocabulary) or one of the eight PyTorch model classes (to load the pre-trained weights): `BertModel`, `BertForMaskedLM`, `BertForNextSentencePrediction`, `BertForPreTraining`, `BertForSequenceClassification`, `BertForTokenClassification`, `BertForMultipleChoice` or `BertForQuestionAnswering`, and - `PRE_TRAINED_MODEL_NAME_OR_PATH` is either: - the shortcut name of a Google AI's pre-trained model selected in the list: @@ -175,19 +181,26 @@ where - `bert-base-uncased`: 12-layer, 768-hidden, 12-heads, 110M parameters - `bert-large-uncased`: 24-layer, 1024-hidden, 16-heads, 340M parameters - `bert-base-cased`: 12-layer, 768-hidden, 12-heads , 110M parameters - - `bert-base-multilingual`: 102 languages, 12-layer, 768-hidden, 12-heads, 110M parameters + - `bert-large-cased`: 24-layer, 1024-hidden, 16-heads, 340M parameters + - `bert-base-multilingual-uncased`: (Orig, not recommended) 102 languages, 12-layer, 768-hidden, 12-heads, 110M parameters + - `bert-base-multilingual-cased`: **(New, recommended)** 104 languages, 12-layer, 768-hidden, 12-heads, 110M parameters - `bert-base-chinese`: Chinese Simplified and Traditional, 12-layer, 768-hidden, 12-heads, 110M parameters - a path or url to a pretrained model archive containing: - - - `bert_config.json` a configuration file for the model, and - - `pytorch_model.bin` a PyTorch dump of a pre-trained instance `BertForPreTraining` (saved with the usual `torch.save()`) + + - `bert_config.json` a configuration file for the model, and 
+ - `pytorch_model.bin` a PyTorch dump of a pre-trained instance `BertForPreTraining` (saved with the usual `torch.save()`) If `PRE_TRAINED_MODEL_NAME_OR_PATH` is a shortcut name, the pre-trained weights will be downloaded from AWS S3 (see the links [here](pytorch_pretrained_bert/modeling.py)) and stored in a cache folder to avoid future download (the cache folder can be found at `~/.pytorch_pretrained_bert/`). -- `cache_dir` can be an optional path to a specific directory to download and cache the pre-trained model weights. This option is useful in particular when you are using distributed training: to avoid concurrent access to the same weights you can set for example `cache_dir='./pretrained_model_{}'.format(args.local_rank)` (see the section on distributed training for more information) +- `cache_dir` can be an optional path to a specific directory to download and cache the pre-trained model weights. This option is useful in particular when you are using distributed training: to avoid concurrent access to the same weights you can set for example `cache_dir='./pretrained_model_{}'.format(args.local_rank)` (see the section on distributed training for more information). + +`Uncased` means that the text has been lowercased before WordPiece tokenization, e.g., `John Smith` becomes `john smith`. The Uncased model also strips out any accent markers. `Cased` means that the true case and accent markers are preserved. Typically, the Uncased model is better unless you know that case information is important for your task (e.g., Named Entity Recognition or Part-of-Speech tagging). For information about the Multilingual and Chinese model, see the [Multilingual README](https://github.com/google-research/bert/blob/master/multilingual.md) or the original TensorFlow repository. + +**When using an `uncased` model, make sure to pass `--do_lower_case` to the example training scripts (or pass `do_lower_case=True` to FullTokenizer if you're using your own script and loading the tokenizer yourself).** Example: ```python +tokenizer = BertTokenizer.from_pretrained('bert-base-uncased', do_lower_case=True) model = BertForSequenceClassification.from_pretrained('bert-base-uncased') ``` @@ -200,8 +213,8 @@ model = BertForSequenceClassification.from_pretrained('bert-base-uncased') The inputs and output are **identical to the TensorFlow model inputs and outputs**. We detail them here. This model takes as *inputs*: - -- `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts `extract_features.py`, `run_classifier.py` and `run_squad.py`), and +[`modeling.py`](./pytorch_pretrained_bert/modeling.py) +- `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] with the word token indices in the vocabulary (see the tokens preprocessing logic in the scripts [`extract_features.py`](./examples/extract_features.py), [`run_classifier.py`](./examples/run_classifier.py) and [`run_squad.py`](./examples/run_squad.py)), and - `token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to a `sentence B` token (see BERT paper for more details). - `attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices selected in [0, 1].
It's a mask to be used if some input sequence lengths are smaller than the max input sequence length of the current batch. It's the mask that we typically use for attention when a batch has varying length sentences. - `output_all_encoded_layers`: boolean which controls the content of the `encoded_layers` output as described below. Default: `True`. @@ -215,7 +228,7 @@ This model *outputs* a tuple composed of: - `pooled_output`: a torch.FloatTensor of size [batch_size, hidden_size] which is the output of a classifier pretrained on top of the hidden state associated to the first character of the input (`CLF`) to train on the Next-Sentence task (see BERT's paper). -An example on how to use this class is given in the `extract_features.py` script which can be used to extract the hidden states of the model for a given input. +An example on how to use this class is given in the [`extract_features.py`](./examples/extract_features.py) script which can be used to extract the hidden states of the model for a given input. #### 2. `BertForPreTraining` @@ -236,6 +249,9 @@ An example on how to use this class is given in the `extract_features.py` script - the masked language modeling logits, and - the next sentence classification logits. + +An example on how to use this class is given in the [`run_lm_finetuning.py`](./examples/run_lm_finetuning.py) script which can be used to fine-tune the BERT language model on your specific different text corpus. This should improve model performance, if the language style is different from the original BERT training corpus (Wiki + BookCorpus). + #### 3. `BertForMaskedLM` @@ -269,15 +285,31 @@ An example on how to use this class is given in the `extract_features.py` script The sequence-level classifier is a linear layer that takes as input the last hidden state of the first character in the input sequence (see Figures 3a and 3b in the BERT paper). -An example on how to use this class is given in the `run_classifier.py` script which can be used to fine-tune a single sequence (or pair of sequence) classifier using BERT, for example for the MRPC task. +An example on how to use this class is given in the [`run_classifier.py`](./examples/run_classifier.py) script which can be used to fine-tune a single sequence (or pair of sequence) classifier using BERT, for example for the MRPC task. + +#### 6. `BertForMultipleChoice` + +`BertForMultipleChoice` is a fine-tuning model that includes `BertModel` and a linear layer on top of the `BertModel`. + +The linear layer outputs a single value for each choice of a multiple choice problem, then all the outputs corresponding to an instance are passed through a softmax to get the model choice. + +This implementation is largely inspired by the work of OpenAI in [Improving Language Understanding by Generative Pre-Training](https://blog.openai.com/language-unsupervised/) and the answer of Jacob Devlin in the following [issue](https://github.com/google-research/bert/issues/38). + +An example on how to use this class is given in the [`run_swag.py`](./examples/run_swag.py) script which can be used to fine-tune a multiple choice classifier using BERT, for example for the Swag task. + +#### 7. `BertForTokenClassification` + +`BertForTokenClassification` is a fine-tuning model that includes `BertModel` and a token-level classifier on top of the `BertModel`. -#### 6. `BertForQuestionAnswering` +The token-level classifier is a linear layer that takes as input the last hidden state of the sequence. + +#### 8. 
`BertForQuestionAnswering` `BertForQuestionAnswering` is a fine-tuning model that includes `BertModel` with a token-level classifiers on top of the full sequence of last hidden states. The token-level classifier takes as input the full sequence of the last hidden state and compute several (e.g. two) scores for each tokens that can for example respectively be the score that a given token is a `start_span` and a `end_span` token (see Figures 3c and 3d in the BERT paper). -An example on how to use this class is given in the `run_squad.py` script which can be used to fine-tune a token classifier using BERT, for example for the SQuAD task. +An example on how to use this class is given in the [`run_squad.py`](./examples/run_squad.py) script which can be used to fine-tune a token classifier using BERT, for example for the SQuAD task. ### Tokenizer: `BertTokenizer` @@ -313,7 +345,7 @@ The optimizer accepts the following arguments: - `b1` : Adams b1. Default : `0.9` - `b2` : Adams b2. Default : `0.999` - `e` : Adams epsilon. Default : `1e-6` -- `weight_decay_rate:` Weight decay. Default : `0.01` +- `weight_decay:` Weight decay. Default : `0.01` - `max_grad_norm` : Maximum norm for the gradients (`-1` means no clipping). Default : `1.0` ## Examples @@ -321,22 +353,23 @@ The optimizer accepts the following arguments: | Sub-section | Description | |-|-| | [Training large models: introduction, tools and examples](#Training-large-models-introduction,-tools-and-examples) | How to use gradient-accumulation, multi-gpu training, distributed training, optimize on CPU and 16-bits training to train Bert models | -| [Fine-tuning with BERT: running the examples](#Fine-tuning-with-BERT-running-the-examples) | Running the examples in [`./examples`](./examples/): `extract_classif.py`, `run_classifier.py` and `run_squad.py` | +| [Fine-tuning with BERT: running the examples](#Fine-tuning-with-BERT-running-the-examples) | Running the examples in [`./examples`](./examples/): `extract_classif.py`, `run_classifier.py`, `run_squad.py` and `run_lm_finetuning.py` | | [Fine-tuning BERT-large on GPUs](#Fine-tuning-BERT-large-on-GPUs) | How to fine tune `BERT large`| ### Training large models: introduction, tools and examples BERT-base and BERT-large are respectively 110M and 340M parameters models and it can be difficult to fine-tune them on a single GPU with the recommended batch size for good performance (in most case a batch size of 32). -To help with fine-tuning these models, we have included five techniques that you can activate in the fine-tuning scripts `run_classifier.py` and `run_squad.py`: gradient-accumulation, multi-gpu training, distributed training, optimize on CPU and 16-bits training . For more details on how to use these techniques you can read [the tips on training large batches in PyTorch](https://medium.com/huggingface/training-larger-batches-practical-tips-on-1-gpu-multi-gpu-distributed-setups-ec88c3e51255) that I published earlier this month. +To help with fine-tuning these models, we have included several techniques that you can activate in the fine-tuning scripts [`run_classifier.py`](./examples/run_classifier.py) and [`run_squad.py`](./examples/run_squad.py): gradient-accumulation, multi-gpu training, distributed training and 16-bits training . 
For more details on how to use these techniques you can read [the tips on training large batches in PyTorch](https://medium.com/huggingface/training-larger-batches-practical-tips-on-1-gpu-multi-gpu-distributed-setups-ec88c3e51255) that I published earlier this month. Here is how to use these techniques in our scripts: - **Gradient Accumulation**: Gradient accumulation can be used by supplying a integer greater than 1 to the `--gradient_accumulation_steps` argument. The batch at each step will be divided by this integer and gradient will be accumulated over `gradient_accumulation_steps` steps. - **Multi-GPU**: Multi-GPU is automatically activated when several GPUs are detected and the batches are splitted over the GPUs. - **Distributed training**: Distributed training can be activated by supplying an integer greater or equal to 0 to the `--local_rank` argument (see below). -- **Optimize on CPU**: The Adam optimizer stores 2 moving average of the weights of the model. If you keep them on GPU 1 (typical behavior), your first GPU will have to store 3-times the size of the model. This is not optimal for large models like `BERT-large` and means your batch size is a lot lower than it could be. This option will perform the optimization and store the averages on the CPU/RAM to free more room on the GPU(s). As the most computational intensive operation is usually the backward pass, this doesn't have a significant impact on the training time. Activate this option with `--optimize_on_cpu` on the `run_squad.py` script. -- **16-bits training**: 16-bits training, also called mixed-precision training, can reduce the memory requirement of your model on the GPU by using half-precision training, basically allowing to double the batch size. If you have a recent GPU (starting from NVIDIA Volta architecture) you should see no decrease in speed. A good introduction to Mixed precision training can be found [here](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) and a full documentation is [here](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html). In our scripts, this option can be activated by setting the `--fp16` flag and you can play with loss scaling using the `--loss_scaling` flag (see the previously linked documentation for details on loss scaling). If the loss scaling is too high (`Nan` in the gradients) it will be automatically scaled down until the value is acceptable. The default loss scaling is 128 which behaved nicely in our tests. +- **16-bits training**: 16-bits training, also called mixed-precision training, can reduce the memory requirement of your model on the GPU by using half-precision training, basically allowing to double the batch size. If you have a recent GPU (starting from NVIDIA Volta architecture) you should see no decrease in speed. A good introduction to Mixed precision training can be found [here](https://devblogs.nvidia.com/mixed-precision-training-deep-neural-networks/) and a full documentation is [here](https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html). In our scripts, this option can be activated by setting the `--fp16` flag and you can play with loss scaling using the `--loss_scale` flag (see the previously linked documentation for details on loss scaling). The loss scale can be zero in which case the scale is dynamically adjusted or a positive power of two in which case the scaling is static. 
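To make the gradient-accumulation bullet above concrete, here is a minimal sketch of the pattern the example scripts implement (simplified, not copied from `run_classifier.py`; the dummy tensors, shapes and hyper-parameters are placeholders):

```python
import torch
from torch.utils.data import DataLoader, TensorDataset
from pytorch_pretrained_bert import BertForSequenceClassification
from pytorch_pretrained_bert.optimization import BertAdam

# Dummy data just to keep the sketch self-contained: 16 sequences of length 32, 2 classes.
input_ids = torch.randint(0, 30522, (16, 32))
input_mask = torch.ones_like(input_ids)
segment_ids = torch.zeros_like(input_ids)
label_ids = torch.randint(0, 2, (16,))
train_dataloader = DataLoader(TensorDataset(input_ids, input_mask, segment_ids, label_ids), batch_size=4)

gradient_accumulation_steps = 4  # effective batch size = 4 (loader) * 4 (accumulation) = 16

model = BertForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=2)
model.train()

num_train_steps = len(train_dataloader) // gradient_accumulation_steps
optimizer = BertAdam(model.parameters(), lr=2e-5, warmup=0.1, t_total=num_train_steps)

for step, batch in enumerate(train_dataloader):
    b_input_ids, b_input_mask, b_segment_ids, b_label_ids = batch
    loss = model(b_input_ids, b_segment_ids, b_input_mask, b_label_ids)
    # Divide the loss so the accumulated gradient matches one large batch.
    loss = loss / gradient_accumulation_steps
    loss.backward()
    # Only update the weights every `gradient_accumulation_steps` mini-batches.
    if (step + 1) % gradient_accumulation_steps == 0:
        optimizer.step()
        optimizer.zero_grad()
```

The real scripts additionally handle multi-GPU/distributed wrapping and the `--fp16` path (which relies on apex); only the accumulation logic is shown here.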
+ +To use 16-bits training and distributed training, you need to install NVIDIA's apex extension [as detailed here](https://github.com/nvidia/apex). You will find more information regarding the internals of `apex` and how to use `apex` in [the doc and the associated repository](https://github.com/nvidia/apex). The results of the tests performed on pytorch-BERT by the NVIDIA team (and my trials at reproducing them) can be consulted in [the relevant PR of the present repository](https://github.com/huggingface/pytorch-pretrained-BERT/pull/116). Note: To use *Distributed Training*, you will need to run one training script on each of your machines. This can be done for example by running the following command on each server (see [the above mentioned blog post]((https://medium.com/huggingface/training-larger-batches-practical-tips-on-1-gpu-multi-gpu-distributed-setups-ec88c3e51255)) for more details): ```bash @@ -346,16 +379,22 @@ Where `$THIS_MACHINE_INDEX` is an sequential index assigned to each of your mach ### Fine-tuning with BERT: running the examples -We showcase the same examples as [the original implementation](https://github.com/google-research/bert/): fine-tuning a sequence-level classifier on the MRPC classification corpus and a token-level classifier on the question answering dataset SQuAD. +We showcase several fine-tuning examples based on (and extended from) [the original implementation](https://github.com/google-research/bert/): + +- a *sequence-level classifier* on the MRPC classification corpus, +- a *token-level classifier* on the question answering dataset SQuAD, +- a *sequence-level multiple-choice classifier* on the SWAG classification corpus, and +- a *BERT language model* on another target corpus. + +#### MRPC + +This example code fine-tunes BERT on the Microsoft Research Paraphrase +Corpus (MRPC) and runs in less than 10 minutes on a single K-80 and in 27 seconds (!) on a single Tesla V100 16GB with apex installed. -Before running these examples you should download the +Before running this example you should download the [GLUE data](https://gluebenchmark.com/tasks) by running [this script](https://gist.github.com/W4ngatang/60c2bdb54d156a41194446737ce03e2e) -and unpack it to some directory `$GLUE_DIR`. Please also download the `BERT-Base` -checkpoint, unzip it to some directory `$BERT_BASE_DIR`, and convert it to its PyTorch version as explained in the previous section. - -This example code fine-tunes `BERT-Base` on the Microsoft Research Paraphrase -Corpus (MRPC) corpus and runs in less than 10 minutes on a single K-80. +and unpack it to some directory `$GLUE_DIR`. ```shell export GLUE_DIR=/path/to/glue @@ -364,6 +403,7 @@ python run_classifier.py \ --task_name MRPC \ --do_train \ --do_eval \ + --do_lower_case \ --data_dir $GLUE_DIR/MRPC/ \ --bert_model bert-base-uncased \ --max_seq_length 128 \ @@ -375,7 +415,29 @@ python run_classifier.py \ Our test ran on a few seeds with [the original implementation hyper-parameters](https://github.com/google-research/bert#sentence-and-sentence-pair-classification-tasks) gave evaluation results between 84% and 88%. -The second example fine-tunes `BERT-Base` on the SQuAD question answering task. +**Fast run with apex and 16-bit precision: fine-tuning on MRPC in 27 seconds!** +First install apex as indicated [here](https://github.com/NVIDIA/apex).
+Then run +```shell +export GLUE_DIR=/path/to/glue + +python run_classifier.py \ + --task_name MRPC \ + --do_train \ + --do_eval \ + --do_lower_case \ + --data_dir $GLUE_DIR/MRPC/ \ + --bert_model bert-base-uncased \ + --max_seq_length 128 \ + --train_batch_size 32 \ + --learning_rate 2e-5 \ + --num_train_epochs 3.0 \ + --output_dir /tmp/mrpc_output/ +``` + +#### SQuAD + +This example code fine-tunes BERT on the SQuAD dataset. It runs in 24 min (with BERT-base) or 68 min (with BERT-large) on a single Tesla V100 16GB. The data for SQuAD can be downloaded with the following links and should be saved in a `$SQUAD_DIR` directory. @@ -390,6 +452,7 @@ python run_squad.py \ --bert_model bert-base-uncased \ --do_train \ --do_predict \ + --do_lower_case \ --train_file $SQUAD_DIR/train-v1.1.json \ --predict_file $SQUAD_DIR/dev-v1.1.json \ --train_batch_size 12 \ @@ -405,6 +468,54 @@ Training with the previous hyper-parameters gave us the following results: {"f1": 88.52381567990474, "exact_match": 81.22043519394512} ``` +#### SWAG + +The data for SWAG can be downloaded by cloning the following [repository](https://github.com/rowanz/swagaf). + +```shell +export SWAG_DIR=/path/to/SWAG + +python run_swag.py \ + --bert_model bert-base-uncased \ + --do_train \ + --do_lower_case \ + --do_eval \ + --data_dir $SWAG_DIR/data \ + --train_batch_size 16 \ + --learning_rate 2e-5 \ + --num_train_epochs 3.0 \ + --max_seq_length 80 \ + --output_dir /tmp/swag_output/ \ + --gradient_accumulation_steps 4 +``` + +Training with the previous hyper-parameters on a single GPU gave us the following results: +``` +eval_accuracy = 0.8062081375587323 +eval_loss = 0.5966546792367169 +global_step = 13788 +loss = 0.06423990014260186 +``` + +#### LM Fine-tuning + +The data should be a text file in the same format as [sample_text.txt](./samples/sample_text.txt) (one sentence per line, docs separated by empty line). +You can download an [exemplary training corpus](https://ext-bert-sample.obs.eu-de.otc.t-systems.com/small_wiki_sentence_corpus.txt) generated from Wikipedia articles and split into ~500k sentences with spaCy. +Training one epoch on this corpus takes about 1:20h on 4 x NVIDIA Tesla P100 with `train_batch_size=200` and `max_seq_length=128`: + + +```shell +python run_lm_finetuning.py \ + --bert_model bert-base-cased \ + --do_train \ + --train_file samples/sample_text.txt \ + --output_dir models \ + --num_train_epochs 5.0 \ + --learning_rate 3e-5 \ + --train_batch_size 32 \ + --max_seq_length 128 +``` + ## Fine-tuning BERT-large on GPUs The options we list above allow to fine-tune BERT-large rather easily on GPU(s) instead of the TPU used by the original implementation. @@ -424,6 +535,7 @@ python ./run_squad.py \ --bert_model bert-large-uncased \ --do_train \ --do_predict \ + --do_lower_case \ --train_file $SQUAD_TRAIN \ --predict_file $SQUAD_EVAL \ --learning_rate 3e-5 \ @@ -432,8 +544,7 @@ python ./run_squad.py \ --doc_stride 128 \ --output_dir $OUTPUT_DIR \ --train_batch_size 24 \ - --gradient_accumulation_steps 2 \ - --optimize_on_cpu + --gradient_accumulation_steps 2 ``` If you have a recent GPU (starting from NVIDIA Volta series), you should try **16-bit fine-tuning** (FP16).
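Stepping back to the LM fine-tuning example above: the training file is expected to follow the format described there (one sentence per line, documents separated by a blank line). A tiny sketch of preparing such a file; the file name and sentences are invented, only the layout matters:

```python
# Write a minimal corpus in the format expected by run_lm_finetuning.py:
# one sentence per line, documents separated by an empty line.
corpus = """\
This is the first sentence of the first document.
This is its second sentence.

The second document starts here.
It also has a second sentence.
"""

with open("my_corpus.txt", "w", encoding="utf-8") as f:
    f.write(corpus)

# The file can then be used in place of samples/sample_text.txt, e.g.:
#   python run_lm_finetuning.py --bert_model bert-base-cased --do_train \
#       --train_file my_corpus.txt --output_dir models
```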
@@ -444,6 +555,7 @@ python ./run_squad.py \ --bert_model bert-large-uncased \ --do_train \ --do_predict \ + --do_lower_case \ --train_file $SQUAD_TRAIN \ --predict_file $SQUAD_EVAL \ --learning_rate 3e-5 \ @@ -479,7 +591,7 @@ A command-line interface is provided to convert a TensorFlow checkpoint in a PyT You can convert any TensorFlow checkpoint for BERT (in particular [the pre-trained models released by Google](https://github.com/google-research/bert#pre-trained-models)) in a PyTorch save file by using the [`./pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py`](convert_tf_checkpoint_to_pytorch.py) script. -This CLI takes as input a TensorFlow checkpoint (three files starting with `bert_model.ckpt`) and the associated configuration file (`bert_config.json`), and creates a PyTorch model for this configuration, loads the weights from the TensorFlow checkpoint in the PyTorch model and saves the resulting model in a standard PyTorch save file that can be imported using `torch.load()` (see examples in `extract_features.py`, `run_classifier.py` and `run_squad.py`). +This CLI takes as input a TensorFlow checkpoint (three files starting with `bert_model.ckpt`) and the associated configuration file (`bert_config.json`), and creates a PyTorch model for this configuration, loads the weights from the TensorFlow checkpoint in the PyTorch model and saves the resulting model in a standard PyTorch save file that can be imported using `torch.load()` (see examples in [`extract_features.py`](./examples/extract_features.py), [`run_classifier.py`](./examples/run_classifier.py) and [`run_squad.py`]((./examples/run_squad.py))). You only need to run this conversion script **once** to get a PyTorch model. You can then disregard the TensorFlow checkpoint (the three files starting with `bert_model.ckpt`) but be sure to keep the configuration file (`bert_config.json`) and the vocabulary file (`vocab.txt`) as these are needed for the PyTorch model too. diff --git a/Untitled.ipynb b/Untitled.ipynb new file mode 100644 index 00000000000000..6701ee5f62e8e7 --- /dev/null +++ b/Untitled.ipynb @@ -0,0 +1,1003 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "# import seaborn as sns\n", + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "06/10/2019 08:14:45 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt\n", + "06/10/2019 08:14:45 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased/\n", + "06/10/2019 08:14:45 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "# BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "# tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))\n", + "tokenizer = BertTokenizer.from_pretrained('/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt')\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "def convert_text_to_examples(text):\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line)\n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1)\n", + " text_b = m.group(2)\n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a)\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " input_type_ids = []\n", + " if 
append_special_tokens:\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(1)\n", + "\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens)\n", + " input_mask = [1] * len(input_ids)\n", + "\n", + " if ex_index < 5:\n", + "# logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n", + "# logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,\n", + " tokens=tokens,\n", + " input_ids=input_ids,\n", + " input_mask=input_mask,\n", + " input_type_ids=input_type_ids))\n", + " return features\n", + "\n", + "def copy_and_mask_feature(feature, masked_tokens=None):\n", + " import copy\n", + " tokens = feature.tokens\n", + " masked_positions = [tokens.index(t) for t in masked_tokens if t in tokens] \\\n", + " if masked_tokens is not None else range(len(tokens))\n", + " assert len(masked_positions) > 0\n", + " masked_feature_copies = []\n", + " for masked_pos in masked_positions:\n", + " feature_copy = copy.deepcopy(feature)\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, masked_positions\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20):\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 'yellow_1'\n", + " elif 
gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " \n", + " avg_gap = 0.\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " gap = math.log(top_prob) - math.log(prob_)\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " print_token(tokens[i], suggestion, gap)\n", + " avg_gap += gap\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print(avg_gap)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=False, show_firstk_probs=20):\n", + " if text[0] in analyzed_cache:\n", + " features, mlm_probs = analyzed_cache[text[0]]\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " tokens = features[0].tokens\n", + " else:\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, masked_positions = copy_and_mask_feature(features[0], masked_tokens=masked_tokens)\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long)\n", + " input_ids = input_ids.to(device)\n", + " input_type_ids = input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "\n", + " tokens = features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size()\n", + " assert bsz == len(masked_positions)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(masked_positions), vocab_size)\n", + " for i, pos in enumerate(masked_positions):\n", + " reduced_mlm_probs[0, i] = mlm_probs[i, pos]\n", + " mlm_probs = reduced_mlm_probs\n", + " tokens = [tokens[i] for i in masked_positions]\n", + " \n", + " analyzed_cache[text[0]] = (features, mlm_probs)\n", + " \n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs)\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " return top_pairs" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 3 | . 
1 | the 1 | , 1 | ) 1 | \" \n", + " 100 | \" \t*100 | \" 0 | ' 0 | and 0 | so 0 | did \n", + " 100 | is \t*100 | is 0 | was 0 | does 0 | isn 0 | has \n", + " 97 | tom \t* 97 | tom 2 | he 0 | thomas 0 | you 0 | she \n", + " 100 | taller \t*100 | taller 0 | tall 0 | shorter 0 | height 0 | tallest \n", + " 100 | than \t*100 | than 0 | then 0 | as 0 | that 0 | to \n", + " 100 | mary \t*100 | mary 0 | tom 0 | you 0 | barbara 0 | maria \n", + " 100 | ? \t*100 | ? 0 | . 0 | ! 0 | ... 0 | - \n", + " 100 | \" \t*100 | \" 0 | ' 0 | ! 0 | * 0 | ) \n", + " 100 | \" \t*100 | \" 0 | no 0 | ' 0 | oh 0 | that \n", + " 100 | no \t*100 | no 0 | yes 0 | nope 0 | yeah 0 | oh \n", + " 100 | , \t*100 | , 0 | . 0 | ; 0 | - 0 | no \n", + " 0 | [MASK] \t 80 | tom 10 | he 4 | mary 2 | she 1 | thomas \n", + " 100 | is \t*100 | is 0 | was 0 | does 0 | has 0 | no \n", + " 100 | taller \t*100 | taller 0 | shorter 0 | tall 0 | larger 0 | smaller \n", + " 100 | . \t*100 | . 0 | ; 0 | , 0 | ! 0 | ) \n", + " 100 | \" \t*100 | \" 0 | ' 0 | . 0 | ! 0 | ; \n", + " 0 | [SEP] \t 86 | . 4 | , 3 | he 2 | \" 1 | she \n" + ] + }, + { + "data": { + "text/plain": [ + "[('tom', 0.7961671352386475),\n", + " ('he', 0.09765198826789856),\n", + " ('mary', 0.04068772494792938),\n", + " ('she', 0.022535543888807297),\n", + " ('thomas', 0.0058586327359080315)]" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\"_ was the greatest physicist who developed theory of relativity.\"]\n", + "text = [\"The trophy doesn't fit into the brown suitcase because the _ is too large.\"] # relational adj\n", + "text = ['\"Is Tom taller than Mary?\" \"No, _ is taller.\"'] # yes/no\n", + "text = [ \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\"] # compare \n", + "text = ['John is taller/shorter than Mary because/although _ is older/younger.'] # causality\n", + "text = [\"Jennifer is older than James . Jennifer younger than Robert . 
_ is the oldest.\"] # transitive inference\n", + "\n", + "analyze_text(text, show_firstk_probs=100)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "def words2heads(attns, tokens, words):\n", + " positions = [tokens.index(word) for word in words]\n", + "\n", + " for layer in range(config.num_hidden_layers):\n", + " for head in range(config.num_attention_heads):\n", + " for pos_indices in [(0, 1), (1, 0)]:\n", + " from_pos, to_pos = positions[pos_indices[0]], positions[pos_indices[1]]\n", + " if attns[layer][head][from_pos].max(0)[1].item() == to_pos:\n", + " print('Layer %d, head %d: %s -> %s' % (layer, head, tokens[from_pos], tokens[to_pos]), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + "\n", + "def head2words(attns, tokens, layer, head):\n", + " for from_pos in range(len(tokens)):\n", + " to_pos = attns[layer][head][from_pos].max(0)[1].item()\n", + " from_word, to_word = tokens[from_pos], tokens[to_pos]\n", + " if from_word.isalpha() and to_word.isalpha():\n", + " print('%s @ %d -> %s @ %d' % (from_word, from_pos, to_word, to_pos), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " \n", + "special_tokens = ['[CLS]', '[SEP]']\n", + "\n", + "def get_salient_heads(attns, tokens, attn_thld=0.5):\n", + " for layer in range(config.num_hidden_layers):\n", + " for head in range(config.num_attention_heads):\n", + " pos_pairs = []\n", + " for from_pos in range(1, len(tokens) - 1): # skip [CLS] and [SEP]\n", + " top_attn, to_pos = attns[layer][head][from_pos].max(0)\n", + " top_attn, to_pos = top_attn.item(), to_pos.item()\n", + " from_word, to_word = tokens[from_pos], tokens[to_pos]\n", + "# if from_word.isalpha() and to_word.isalpha() and top_attn >= attn_thld:\n", + " if abs(from_pos - to_pos) <= 1:\n", + "# print('Layer %d, head %d: %s @ %d -> %s @ %d' % (layer, head, from_word, from_pos, to_word, to_pos), end='\\t')\n", + "# print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " pos_pairs.append((from_pos, to_pos))\n", + " \n", + " ratio = len(pos_pairs) / (len(tokens) - 2)\n", + " if ratio > 0.5:\n", + " print(ratio)\n", + " for from_pos, to_pos in pos_pairs:\n", + " print('Layer %d, head %d: %s @ %d -> %s @ %d' % (layer, head, tokens[from_pos], from_pos, tokens[to_pos], to_pos), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/10/2019 21:46:20 - INFO - examples.extract_features - tokens: [CLS] jim laughed because he was so happy . 
[SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "jim @ 1 -> jim @ 1\ttensor([0.7248, 0.0842, 0.0656, 0.0407, 0.0319], device='cuda:0')\n" + ] + } + ], + "source": [ + "# text, words = [\"The trophy doesn't fit into the brown suitcase because the it is too large.\"], ['fit', 'large']\n", + "# text, words = [\"Mary couldn't beat John in the match because he was too strong.\"], ['beat', 'strong']\n", + "text, words = [\"John is taller than Mary because he is older.\"], ['taller', 'older']\n", + "# text, words = [\"The red ball is heavier than the blue ball because the red ball is bigger.\"], ['heavier', 'bigger']\n", + "text, words = [\"Jim laughed because he was so happy.\"], ['cried', 'sad']\n", + "# text, words = [\"Jim ate the cake quickly because he was so hungry.\"], ['ate', 'hungry']\n", + "# text, words = [\"Jim drank the juice quickly because he was so thirsty.\"], ['drank', 'thirsty']\n", + "# text, words = [\"Tom's drawing hangs high. It is above Susan's drawing\"], ['high', 'above']\n", + "# text, words = [\"Tom's drawing hangs low. It is below Susan's drawing\"], ['low', 'below']\n", + "# text, words = [\"John is taller than Mary . Mary is shorter than John.\"], ['taller', 'shorter']\n", + "# text, words = [\"The drawing is above the cabinet. The cabinet is below the drawing\"], ['above', 'below']\n", + "# text, words = [\"Jim is very thin . He is not fat.\"], ['thin', 'fat']\n", + "\n", + "features = convert_examples_to_features(convert_text_to_examples(text), tokenizer, print_info=False)\n", + "input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long).to(device)\n", + "input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long).to(device)\n", + "mlm_logits, _ = model(input_ids, input_type_ids)\n", + "mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "tokens = features[0].tokens\n", + "# top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=100)\n", + "\n", + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "# plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)\n", + "\n", + "attns = hypo[attn_name]\n", + " \n", + "# words2heads(attns, tokens, words)\n", + "head2words(attns, tokens, 2, 10)\n", + "# get_salient_heads(attns, tokens, attn_thld=0.0)" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "0,2\t-1\n", + "0,3\t-1\n", + "0,10\t+1 动宾\n", + "1,1\t+1 动介\n", + "1,4\t-1\n", + "1,11\t0\n", + "2,0\t+1**\n", + "2,6\t0**\n", + "2,9\t+1**\n", + "3,5\t-1\n", + "7,4\t-1\n", + "11,8\t0\n" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "head_size = config.hidden_size // config.num_attention_heads\n", + "layer = 1\n", + "head = 1 # 2, 3, 10\n", + "wq = model.bert.encoder.layer[layer].attention.self.query.weight.data.view(-1, config.num_attention_heads, head_size).permute(1, 0, 2)\n", + "wk = model.bert.encoder.layer[layer].attention.self.key.weight.data.view(-1, config.num_attention_heads, head_size).permute(1, 0, 2)\n", + "\n", + "wqk = torch.bmm(wq, wk.transpose(-1, -2))\n", + "# (wqk * wqk.transpose(-1, -2)).sum((1, 2)) / (wqk * wqk).sum((1, 2))\n", + "# plt.imshow(wqk[head]*wqk[head])\n", + "# plt.show()\n", + 
"\n", + "# q = torch.matmul(pos_emb, wq)\n", + "# k = torch.matmul(pos_emb_prev, wk)\n", + "# (q * k).sum((-2, -1))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "pos_emb = model.bert.embeddings.position_embeddings.weight.data\n", + "pos_emb_prev = torch.zeros_like(pos_emb)\n", + "pos_emb_next = torch.zeros_like(pos_emb)\n", + "pos_emb_prev[1:] = pos_emb[:-1]\n", + "pos_emb_next[:-1] = pos_emb[1:]\n", + "pos_emb, pos_emb_prev, pos_emb_next = pos_emb[1:-1], pos_emb_prev[1:-1], pos_emb_next[1:-1]\n", + "\n", + "# pos_q = torch.matmul(pos_emb, wk[head])\n", + "# plt.imshow(pos_q[:32])\n", + "# plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.',\n", + " 'Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. _ left then _ left.\",\n", + " \"John came after Mary. They left in reverse order. _ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare sentences with same / opposite meaning, 2nd order\n", + " \"Though John is tall, Tom is taller than John. So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " # \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no, 2nd order\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality, 2nd order\n", + " \"John said/thought that the red ball was heavier than the blue ball. He was wrong. The _ ball was heavier\",\n", + " \"John was wrong in saying/thinking that the red ball was heavier than the blue ball. The _ ball was heavier\",\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong/right.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . 
\",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. _ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + "# if a not in tokenizer.vocab:\n", + "# ce\n", + "# print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(2,\n", + " \"The trophy doesn't fit into the brown suitcase because [it] is too large.\",\n", + " 'fit into:large/small'),\n", + " (4,\n", + " 'Joan made sure to thank Susan for all the help [she] had recieved.',\n", + " 'thank:receive/give'),\n", + " (10,\n", + " 'The delivery truck zoomed by the school bus because [it] was going so fast.',\n", + " 'zoom by:fast/slow'),\n", + " (12,\n", + " 'Frank felt vindicated when his longtime rival Bill revealed that [he] was the winner of the competition.',\n", + " 'vindicated/crushed:be the winner'),\n", + " (16,\n", + " 'The large ball crashed right through the table because [it] was made of steel.',\n", + " 'crash through:[hard]/[soft]'),\n", + " (18,\n", + " \"John couldn't see the stage with Billy in front of him because [he] is so short.\",\n", + " '[block]:short/tall'),\n", + " (20,\n", + " 'Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.',\n", + " 'down to:top/bottom'),\n", + " (22,\n", + " 'Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.',\n", + " 'beat:good/bad'),\n", + " (26,\n", + " \"Sam's drawing was hung just above Tina's and [it] did look much better with another one below it.\",\n", + " 'above/below'),\n", + " (28,\n", + " 'Anna 
did a lot better than her good friend Lucy on the test because [she] had studied so hard.',\n", + " 'better/worse:study hard'),\n", + " (30,\n", + " 'The firemen arrived after the police because [they] were coming from so far away.',\n", + " 'after/before:far away'),\n", + " (32,\n", + " \"Frank was upset with Tom because the toaster [he] had bought from him didn't work.\",\n", + " 'be upset with:buy from not work/sell not work'),\n", + " (36,\n", + " 'The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first.',\n", + " 'above/below:moved first'),\n", + " (38,\n", + " 'Pete envies Martin although [he] is very successful.',\n", + " 'although/because'),\n", + " (42,\n", + " 'I poured water from the bottle into the cup until [it] was empty.',\n", + " 'pour:empty/full'),\n", + " (46,\n", + " \"Sid explained his theory to Mark but [he] couldn't convince him.\",\n", + " 'explain:convince/understand'),\n", + " (48,\n", + " \"Susan knew that Ann's son had been in a car accident, so [she] told her about it.\",\n", + " '?know tell:so/because'),\n", + " (50,\n", + " \"Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.\",\n", + " 'beat:younger/older'),\n", + " (64,\n", + " 'In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.',\n", + " 'but/and'),\n", + " (68,\n", + " 'Ann asked Mary what time the library closes, because [she] had forgotten.',\n", + " 'because/but'),\n", + " (84,\n", + " 'If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.',\n", + " 'fool:get/lose'),\n", + " (92,\n", + " 'Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.',\n", + " '?stop normal/stop abnormal:strange'),\n", + " (98,\n", + " \"I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.\",\n", + " 'put ... into filled with ... :get in/get out'),\n", + " (100,\n", + " 'The dog chased the cat, which ran up a tree. [It] waited at the bottom.',\n", + " 'up:at the bottom/at the top'),\n", + " (106,\n", + " 'John was doing research in the library when he heard a man humming and whistling. [He] was very annoyed.',\n", + " 'hear ... humming and whistling:annoyed/annoying'),\n", + " (108,\n", + " 'John was jogging through the park when he saw a man juggling watermelons. [He] was very impressed.',\n", + " 'see ... juggling watermelons:impressed/impressive'),\n", + " (132,\n", + " 'Jane knocked on the door, and Susan answered it. [She] invited her to come out.',\n", + " 'visit:invite come out/invite come in'),\n", + " (150,\n", + " 'Jackson was greatly influenced by Arnold, though [he] lived two centuries later.',\n", + " 'influence:later/earlier'),\n", + " (160,\n", + " 'The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.',\n", + " 'change:hard/easy'),\n", + " (166,\n", + " 'Fred is the only man still alive who remembers my great-grandfather. [He] is a remarkable man.',\n", + " 'alive:is/was'),\n", + " (170,\n", + " \"In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.\",\n", + " 'better equipped and large:defeated/victorious'),\n", + " (186,\n", + " 'When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. 
[They] were very much in the minority.',\n", + " 'be full of:minority/majority'),\n", + " (188,\n", + " 'Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .',\n", + " 'like over:more/fewer'),\n", + " (190,\n", + " 'We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .',\n", + " 'place on all:not enough/too many'),\n", + " (196,\n", + " \"Steve follows Fred's example in everything. [He] admires him hugely.\",\n", + " 'follow:admire/influence'),\n", + " (198,\n", + " \"The table won't fit through the doorway because [it] is too wide.\",\n", + " 'fit through:wide/narrow'),\n", + " (200,\n", + " 'Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.',\n", + " 'trade:dowdy/great'),\n", + " (202,\n", + " 'John hired Bill to take care of [him] .',\n", + " 'hire/hire oneself to:take care of'),\n", + " (204,\n", + " 'John promised Bill to leave, so an hour later [he] left.',\n", + " 'promise/order'),\n", + " (210,\n", + " \"Jane knocked on Susan's door but [she] did not get an answer.\",\n", + " 'knock:get an answer/answer'),\n", + " (212,\n", + " 'Joe paid the detective after [he] received the final report on the case.',\n", + " 'pay:receive/deliver'),\n", + " (226,\n", + " 'Bill passed the half-empty plate to John because [he] was full.',\n", + " 'pass the plate:full/hungry'),\n", + " (252,\n", + " 'George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.',\n", + " 'even though/because/not'),\n", + " (255,\n", + " \"Jane gave Joan candy because [she] wasn't hungry.\",\n", + " 'give:not hungry/hungry'),\n", + " (259,\n", + " 'James asked Robert for a favor but [he] was refused.',\n", + " 'ask for a favor:refuse/be refused`'),\n", + " (261,\n", + " 'Kirilov ceded the presidency to Shatov because [he] was less popular.',\n", + " 'cede:less popular/more popular'),\n", + " (263,\n", + " 'Emma did not pass the ball to Janie although [she] saw that she was open.',\n", + " 'not pass although:see open/open')]" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "# [(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])\n", + "[(eg[0]['index'], eg[0]['sentence'], eg[0]['relational_word']) for index, eg in groups.items() if '/' in eg[0]['relational_word']]" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in 
e['sentence'] for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 
0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "config.num" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled1.ipynb b/Untitled1.ipynb new file mode 100644 index 00000000000000..0a6ceec8cab0b2 --- /dev/null +++ b/Untitled1.ipynb @@ -0,0 +1,2971 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import itertools\n", + "from itertools import product, permutations\n", + "from random import sample" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from torch.utils.data import DataLoader, RandomSampler, SequentialSampler" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "from pytorch_pretrained_bert.tokenization import BertTokenizer\n", + "from pytorch_pretrained_bert.modeling import BertForPreTraining, BertForMaskedLM, BertConfig\n", + "from pytorch_pretrained_bert.optimization import BertAdam\n", + "from run_child_finetuning import *" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "06/09/2019 14:55:34 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt\n" + ] + } + ], + "source": [ + "BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased'\n", + "tokenizer = BertTokenizer.from_pretrained('/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt')" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def assert_in_bert_vocab(tokens):\n", + " for token in tokens:\n", + " if isinstance(token, str): # entities\n", + " assert token.lower() in tokenizer.vocab, token + '->' + str(tokenizer.tokenize(token))\n", + " elif isinstance(token, tuple): # relations\n", + " assert len(token) == 2, str(token)\n", + " for rel in token:\n", + " rel = rel.split('..')[0]\n", + " assert rel in tokenizer.vocab, rel + '->' + str(tokenizer.tokenize(rel))" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "19" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "fruits = ['apple', 'banana', 'pear', 'orange', 'peach', 'berry', 'plum', 'pinapple', 'melon', 'cherry', 'grape', 'lemon',\n", + " 'papaya', 'durian', 'kiwi', 'mongo', 'date', 'jujube', 'watermelon']\n", + "len(fruits)\n", + "# http://www.manythings.org/vocabulary/lists/e/words.php?f=fruit" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "16" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "animals = ['dog', 'cat', 'pig', 'chicken', 'hen', 'cock', 'duck', 'goose', 'monkey', 'tiger', 'bird', 'bear', 'lion', 'bee', 'ant', 'elephant']\n", + "len(animals)\n", + "# see more at http://www.manythings.org/vocabulary/lists/a/words.php?f=animals_1\n", + "# http://www.manythings.org/vocabulary/lists/a/\n", + "# especially http://www.manythings.org/vocabulary/lists/a/words.php?f=classroom_1 things in classroom" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "3" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "3" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "male_names = ['James', 'John', 'Robert', ]#'Michael', 'David', 'Paul', 'Jeff', 'Daniel', 'Charles', 'Thomas']\n", + "female_names = ['Mary', 'Linda', 'Jennifer', ]#'Maria', 'Susan', 'Lisa', 'Sandra', 'Barbara', 'Patricia', 'Elizabeth']\n", + "len(male_names)\n", + "len(female_names)\n", + "people_names = (male_names, female_names)\n", + "assert_in_bert_vocab(male_names)\n", + "assert_in_bert_vocab(female_names)" + ] + }, + { + "cell_type": "code", + 
"execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "spatial_relations = (\n", + " ('above', 'below'), \n", + " ('in front of/in the front', 'behind/in the back'), \n", + " ('on the left..side of', 'on the right..side of')\n", + ")\n", + "people_adj_relations = (\n", + " ('taller..than', 'shorter..than'), \n", + "# ('thinner..than', 'fatter..than'), # fatter not in BERT vocab\n", + " ('younger..than', 'older..than'), \n", + "# ('stronger..than', 'weaker..than'), \n", + "# ('faster..than', 'slower..than'),\n", + "# ('richer..than', 'poorer..than')\n", + ")\n", + "animal_adj_relations = (\n", + " ('thinner..than', 'fatter..than'), \n", + " ('younger..than', 'older..than'), \n", + " ('stronger..than', 'weaker..than'), \n", + " ('faster..than', 'slower..than')\n", + ")\n", + "object_adj_relations = (\n", + " ('bigger..than', 'smaller..than'), \n", + " ('heavier..than', 'lighter..than'), \n", + " ('better..than', 'worse..than')\n", + ")\n", + "assert_in_bert_vocab(people_adj_relations)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "rel2entypes = {\n", + "# spatial_relations: [fruits, animals, people_names],\n", + " people_adj_relations: [people_names],\n", + "# animal_adj_relations: [animals],\n", + "# object_adj_relations: [fruits, animals]\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "twoent_A_template = 'is {dt} {ent0} {rel} {dt} {ent1}'\n", + "twoent_B_template = '{dt} {ent} is {pred}'\n", + "twoent_template = '\"{A}?\" \"{conj} {B}.\"'" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "def reverse(l):\n", + " return list(reversed(l)) if isinstance(l, list) else tuple(reversed(l))" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "def mask(ent_str):\n", + " tokens = ent_str.strip().split()\n", + " if len(tokens) == 1:\n", + " return '[%s]' % tokens[0]\n", + " elif len(tokens) == 2:\n", + " assert tokens[0] == 'the', ent_str\n", + " return '%s [%s]' % (tokens[0], tokens[1])\n", + " else:\n", + " assert False, ent_str" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "def get_conj(join_type, A, B):\n", + " if join_type == 'no':\n", + " return 'no,'\n", + " return 'yes,'\n", + " assert join_type == 'yes'\n", + " subB = B.split('is')[0].split()[-1]\n", + " w0, w1, w2 = A.split()[: 3]\n", + " assert w0 == 'Is'\n", + " subA = w1 if w1 != 'the' else w2\n", + " if subA == subB and 'not' not in B: # B is repeating A\n", + " return 'Yes,'\n", + " else:\n", + " return 'Yes, in other words,'" + ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": {}, + "outputs": [], + "source": [ + "def make_sentences(A_template, B_template, join_template,\n", + " index=-1, orig_sentence='', entities=[\"John\", \"Mary\"], entity_substitutes=None, determiner=\"\", \n", + " relations=[],\n", + " packed_relations=[\"rel/~rel\", \"rev_rel/~rev_rel\"], packed_relation_substitutes=None, relation_suffix=\"\",\n", + " packed_predicates=[\"pred0/~pred0\", \"pred1/~pred1\"], predicate_substitutes=None,\n", + " predicate_dichotomy=True, reverse_causal=False):\n", + "# assert entities[0].lower() in tokenizer.vocab , entities[0]\n", + "# assert entities[1].lower() in tokenizer.vocab , entities[1]\n", + " determiner = 'the' if entities[0].islower() else ''\n", + 
" relations, predicates = ([r.replace('..', ' ') for r in relations], [r.split('..')[0] for r in relations]) \\\n", + " if '..' in relations[0] else ([r.split('/')[0] for r in relations], [r.split('/')[-1] for r in relations])\n", + " neg_predicates = ['not ' + p for p in predicates]\n", + " As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]]\n", + " negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + reverse(relations)[:1], reverse(entities) + relations[:1]]]\n", + " \n", + " Bs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, predicates)]\n", + " negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, neg_predicates)]\n", + " if predicate_dichotomy:\n", + " Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, reversed(neg_predicates))]\n", + " negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, reversed(predicates))]\n", + " \n", + " def form_sentences(sentence_template, join_type, As, Bs):\n", + " return [\" \".join(sentence_template.format(A=A, B=B, conj=get_conj(join_type, A, B)).split()) for A, B in itertools.product(As, Bs)]\n", + " \n", + " yes_sentences = []\n", + " for A, B in [(As, Bs), (negAs, negBs)]:\n", + " yes_sentences += form_sentences(join_template, 'yes', A, B)\n", + "# yes_sentences = list(itertools.chain.from_iterable([form_sentences(join_template, 'yes', A, B) for A, B in [(As, Bs), (negAs, negBs)]]))\n", + "\n", + " no_sentences = []\n", + " for A, B in [(As, negBs), (negAs, Bs)]:\n", + " no_sentences += form_sentences(join_template, 'no', A, B)\n", + " \n", + " return yes_sentences + no_sentences\n", + " \n", + "# make_sentences(\n", + "# twoent_A_template, twoent_B_template, twoent_template, entities=['apple', 'banana'], determiner='', relations=['taller..than', 'shorter..than'])" + ] + }, + { + "cell_type": "code", + "execution_count": 180, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'make_sentences' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[0;31m# yes_sent, no_sent = make_sentences(twoent_A_template, twoent_B_template, twoent_template, entities=list(ent_pair), relations=rel)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0;31m# sentences += (yes_sent + no_sent)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 15\u001b[0;31m \u001b[0msentences\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mmake_sentences\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtwoent_A_template\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtwoent_B_template\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtwoent_template\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mentities\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ment_pair\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mrelations\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mrel\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 16\u001b[0m \u001b[0msample\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentences\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m20\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 17\u001b[0m \u001b[0msentence_groups\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentences\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'make_sentences' is not defined" + ] + } + ], + "source": [ + "sentence_groups = []\n", + "for relations, entity_types in rel2entypes.items():\n", + " sentences = []\n", + " ent_pairs = []\n", + " for entities in entity_types:\n", + " if isinstance(entities, list):\n", + " ent_pairs += permutations(entities, 2)\n", + " else:\n", + " assert isinstance(entities, tuple) and len(entities) == 2 # people_names\n", + " ent_pairs += product(entities[0], entities[1])\n", + " ent_pairs += product(entities[1], entities[0])\n", + " for (rel, ent_pair) in product(relations, ent_pairs):\n", + "# yes_sent, no_sent = make_sentences(twoent_A_template, twoent_B_template, twoent_template, entities=list(ent_pair), relations=rel)\n", + "# sentences += (yes_sent + no_sent)\n", + " sentences += make_sentences(twoent_A_template, twoent_B_template, twoent_template, entities=list(ent_pair), relations=rel)\n", + " sample(sentences, 20)\n", + " sentence_groups.append(sentences)" + ] + }, + { + "cell_type": "code", + "execution_count": 115, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "4" + ] + }, + "execution_count": 115, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "[78432, 38400, 32768, 59232]" + ] + }, + "execution_count": 115, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(sentence_groups)\n", + "[len(sg) for sg in sentence_groups]" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "def comparative2superlative(comparative_form, structured=False):\n", + " assert comparative_form.endswith('er'), comparative_form\n", + " superlative_form = 'the ' + comparative_form[:-2] + 'est' \\\n", + " if not structured else 'the ' + comparative_form + ' st'\n", + " return superlative_form" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "def make_relational_atoms(relational_template, entities, relations):\n", + " neg_relations = [\"isn't \" + r for r in relations]\n", + " relations = [\"is \" + r for r in relations]\n", + " atoms = [relational_template.format(ent0=ent0, ent1=ent1, rel=rel) \n", + " for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]]\n", + " atoms += [relational_template.format(ent0=ent0, ent1=ent1, rel=rel) \n", + " for ent0, ent1, rel in [entities + reverse(neg_relations)[:1], reverse(entities) + neg_relations[:1]]]\n", + " return atoms" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['John is taller than Mary . Mary is taller than Susan . ||| is Susan shorter than John ? [yes] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is Susan shorter than John ? 
[yes] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is John taller than Susan ? [yes] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| is John taller than Susan ? [yes] .\",\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .']" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "['John is taller than Mary . Mary is taller than Susan . ||| who is the tallest ? [John] .',\n", + " 'John is taller than Mary . Mary is taller than Susan . ||| who is the shortest ? [Susan] .',\n", + " 'John is taller than Mary . Mary is taller than Susan . ||| is John taller than Susan ? [yes] .',\n", + " 'John is taller than Mary . Mary is taller than Susan . ||| is John shorter than Susan ? [no] .',\n", + " 'John is taller than Mary . Mary is taller than Susan . ||| is Susan shorter than John ? [yes] .',\n", + " 'John is taller than Mary . Mary is taller than Susan . ||| is Susan taller than John ? [no] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| who is the tallest ? [John] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| who is the shortest ? [Susan] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is John taller than Susan ? [yes] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is John shorter than Susan ? [no] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " 'John is taller than Mary . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .',\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . ||| who is the tallest ? [John] .\",\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . 
||| is John taller than Susan ? [yes] .\",\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John is taller than Mary . Mary isn't shorter than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John is taller than Mary . Susan isn't taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| who is the tallest ? [John] .',\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| who is the shortest ? [Susan] .',\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| is John taller than Susan ? [yes] .',\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| is John shorter than Susan ? [no] .',\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| is Susan shorter than John ? [yes] .',\n", + " 'Mary is shorter than John . Mary is taller than Susan . ||| is Susan taller than John ? [no] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| who is the tallest ? [John] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| who is the shortest ? [Susan] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| is John taller than Susan ? [yes] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| is John shorter than Susan ? [no] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " 'Mary is shorter than John . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .',\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| who is the tallest ? [John] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary is shorter than John . Mary isn't shorter than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| is Susan shorter than John ? 
[yes] .\",\n", + " \"Mary is shorter than John . Susan isn't taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| who is the tallest ? [John] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| is John taller than Susan ? [yes] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John isn't shorter than Mary . Mary is taller than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John isn't shorter than Mary . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| who is the tallest ? [John] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| is John taller than Susan ? [yes] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John isn't shorter than Mary . Mary isn't shorter than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"John isn't shorter than Mary . Susan isn't taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't taller than John . Mary is taller than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . 
||| who is the tallest ? [John] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't taller than John . Susan is shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't taller than John . Mary isn't shorter than Susan . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't taller than John . Susan isn't taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| who is the tallest ? [John] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| who is the shortest ? [Susan] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| is John taller than Susan ? [yes] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| is John shorter than Susan ? [no] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " 'Mary is taller than Susan . John is taller than Mary . ||| is Susan taller than John ? [no] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| who is the tallest ? [John] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| who is the shortest ? [Susan] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| is John taller than Susan ? [yes] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| is John shorter than Susan ? [no] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| is Susan shorter than John ? [yes] .',\n", + " 'Mary is taller than Susan . Mary is shorter than John . ||| is Susan taller than John ? [no] .',\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| is John taller than Susan ? 
[yes] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary is taller than Susan . John isn't shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| who is the tallest ? [John] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary is taller than Susan . Mary isn't taller than John . ||| is Susan taller than John ? [no] .\",\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| who is the tallest ? [John] .',\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| who is the shortest ? [Susan] .',\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| is John taller than Susan ? [yes] .',\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| is John shorter than Susan ? [no] .',\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| is Susan shorter than John ? [yes] .',\n", + " 'Susan is shorter than Mary . John is taller than Mary . ||| is Susan taller than John ? [no] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| who is the tallest ? [John] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| who is the shortest ? [Susan] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| is John taller than Susan ? [yes] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| is John shorter than Susan ? [no] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| is Susan shorter than John ? [yes] .',\n", + " 'Susan is shorter than Mary . Mary is shorter than John . ||| is Susan taller than John ? [no] .',\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan is shorter than Mary . John isn't shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Susan is shorter than Mary . Mary isn't taller than John . ||| who is the tallest ? [John] .\",\n", + " \"Susan is shorter than Mary . Mary isn't taller than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan is shorter than Mary . Mary isn't taller than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan is shorter than Mary . Mary isn't taller than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan is shorter than Mary . Mary isn't taller than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan is shorter than Mary . 
Mary isn't taller than John . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . John is taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . Mary is shorter than John . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . John isn't shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| who is the tallest ? [John] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Mary isn't shorter than Susan . Mary isn't taller than John . ||| is Susan taller than John ? [no] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan isn't taller than Mary . John is taller than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| who is the tallest ? 
[John] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan isn't taller than Mary . Mary is shorter than John . ||| is Susan taller than John ? [no] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| who is the tallest ? [John] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan isn't taller than Mary . John isn't shorter than Mary . ||| is Susan taller than John ? [no] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| who is the tallest ? [John] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| who is the shortest ? [Susan] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| is John taller than Susan ? [yes] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| is John shorter than Susan ? [no] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| is Susan shorter than John ? [yes] .\",\n", + " \"Susan isn't taller than Mary . Mary isn't taller than John . ||| is Susan taller than John ? [no] .\"]" + ] + }, + "execution_count": 44, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "transitive_P_template = '{ent0} {rel} {ent1} .'\n", + "transitive_wh_QA_template = '{which} is {pred} ? {ent} .'\n", + "transitive_yesno_QA_template = 'is {ent0} {rel} {ent1} ? {ans} .'\n", + "\n", + "def make_transitive(P_template, wh_QA_template, yesno_QA_template, join_template,\n", + " index=-1, orig_sentence='', entities=[\"John\", \"Mary\", \"Susan\"], entity_substitutes=None, determiner=\"\", \n", + " relations=('taller..than', 'shorter..than'), maybe=True, structured=False,\n", + " packed_predicates=[\"pred0/~pred0\", \"pred1/~pred1\"], predicate_substitutes=None,\n", + " predicate_dichotomy=True, reverse_causal=False):\n", + " if entities[0].islower():\n", + " entities = ['the ' + e for e in entities]\n", + "# print('relations =', relations)\n", + " relations, predicates = ([r.replace('..', ' ') for r in relations], [r.split('..')[0] for r in relations]) \\\n", + " if '..' 
in relations[0] else ([r.split('/')[0] for r in relations], [r.split('/')[-1] for r in relations])\n", + "# print('relations =', relations, 'predicates =', predicates)\n", + " predicates = [comparative2superlative(p, structured=structured) for p in predicates]\n", + " \n", + " P0_entities, P1_entities = ([entities[0], entities[1]], [entities[1], entities[2]]) \\\n", + " if not maybe else ([entities[0], entities[1]], [entities[0], entities[2]])\n", + " P0 = make_relational_atoms(P_template, P0_entities, relations)\n", + " P1 = make_relational_atoms(P_template, P1_entities, relations)\n", + " \n", + " wh_pronoun = 'which' if entities[0].startswith('the') else 'who'\n", + " wh_QA = [wh_QA_template.format(which=wh_pronoun, pred=pred, ent=ent) \n", + " for pred, ent in [(predicates[0], mask(entities[0])), (predicates[-1], mask(entities[-1] if not maybe else 'unknown'))]]\n", + " \n", + " def _maybe(s):\n", + " return s if not maybe else 'maybe'\n", + " yesno_entities = (entities[0], entities[-1]) if not maybe else (entities[1], entities[-1])\n", + " yesno_QA = [yesno_QA_template.format(ent0=ent0, ent1=ent1, rel=rel, ans=ans) \n", + " for ent0, ent1, rel, ans in [\n", + " (yesno_entities[0], yesno_entities[-1], relations[0], mask(_maybe('yes'))), \n", + " (yesno_entities[0], yesno_entities[-1], relations[-1], mask(_maybe('no'))),\n", + " (yesno_entities[-1], yesno_entities[0], relations[-1], mask(_maybe('yes'))),\n", + " (yesno_entities[-1], yesno_entities[0], relations[0], mask(_maybe('no')))]]\n", + " \n", + " Ps = [(p0, p1) for p0, p1 in list(product(P0, P1)) + list(product(P1, P0))]\n", + " QAs = wh_QA + yesno_QA\n", + " \n", + " def get_rel(atom):\n", + " for rel in relations:\n", + "# assert rel.startswith('is')\n", + " rel = rel.split()[0] # \"taller than\" -> \"taller\"\n", + " if rel in atom:\n", + " return rel\n", + " assert False\n", + " sentences = [p0 + ' ' + p1 + ' ||| ' + qas for (p0, p1), qas in product(Ps, QAs)\n", + " if not structured or get_rel(p0) == get_rel(p1) == get_rel(qas)]\n", + "# sentences = [s.replace('er st ', 'est ') for s in sentences]\n", + " return sentences\n", + "\n", + "sentences = make_transitive(transitive_P_template, transitive_wh_QA_template, transitive_yesno_QA_template, None, maybe=False, structured=False)\n", + "# len(sentences)\n", + "sample(sentences, 20)\n", + "sentences" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'a . . . b . . . 
c'" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + }, + { + "ename": "TypeError", + "evalue": "object of type 'NoneType' has no len()", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;34m'a'\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' .'\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mrandom\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m10\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' '\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'b'\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' .'\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mrandom\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrandint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m10\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m' '\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'c'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m: object of type 'NoneType' has no len()" + ] + } + ], + "source": [ + "'a' + ' .'*random.randint(0, 10) + ' ' + 'b' + ' .'*random.randint(0, 10) + ' ' + 'c'\n", + "len(None)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['James is older than Jennifer . Jennifer is older than John . ||| is James older than John ? [yes] .',\n", + " \"James is younger than Jennifer . James isn't younger than Linda . ||| who is the younger st ? [Linda] .\",\n", + " \"Linda is shorter than Mary . Linda isn't shorter than Robert . ||| is Mary shorter than Robert ? [no] .\",\n", + " 'Linda is shorter than Robert . John is shorter than Linda . ||| who is the shorter st ? [John] .',\n", + " 'Mary is older than Robert . John is older than Mary . ||| is Robert older than John ? [no] .',\n", + " \"Jennifer isn't younger than Robert . James is younger than Robert . ||| is Jennifer younger than James ? [no] .\",\n", + " \"Mary is shorter than Jennifer . Mary isn't shorter than John . ||| who is the shorter st ? [John] .\",\n", + " \"Linda isn't taller than Robert . Linda is taller than John . ||| who is the taller st ? [Robert] .\",\n", + " \"Robert isn't younger than Mary . Mary isn't younger than Linda . ||| is Robert younger than Linda ? [no] .\",\n", + " \"Jennifer isn't taller than Linda . Mary isn't taller than Jennifer . ||| who is the taller st ? [Linda] .\",\n", + " \"Mary isn't older than Linda . John isn't older than Mary . ||| is John older than Linda ? [no] .\",\n", + " \"Linda is taller than Robert . John isn't taller than Robert . ||| is John taller than Linda ? [no] .\",\n", + " \"Robert isn't older than Jennifer . James is older than Jennifer . ||| is Robert older than James ? [no] .\",\n", + " \"Linda isn't older than Jennifer . Jennifer isn't older than James . ||| is Linda older than James ? [no] .\",\n", + " \"Jennifer is shorter than Robert . John isn't shorter than Robert . ||| is Jennifer shorter than John ? [yes] .\",\n", + " 'James is older than Mary . Jennifer is older than James . 
||| is Mary older than Jennifer ? [no] .',\n", + " 'Jennifer is taller than John . John is taller than Robert . ||| is Jennifer taller than Robert ? [yes] .',\n", + " \"John is younger than Linda . Mary isn't younger than Linda . ||| who is the younger st ? [John] .\",\n", + " \"Jennifer is younger than Mary . Jennifer isn't younger than John . ||| who is the younger st ? [John] .\",\n", + " \"Robert is younger than John . Linda isn't younger than John . ||| is Linda younger than Robert ? [no] .\"]" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "num_sent = 11520 -> 11520\n" + ] + } + ], + "source": [ + "sentence_groups = []\n", + "maybe = False\n", + "for relations, entity_types in rel2entypes.items():\n", + " sentences = []\n", + " ent_tuples = []\n", + " for entities in entity_types:\n", + " if isinstance(entities, list):\n", + " ent_tuples += permutations(entities, 3)\n", + " else:\n", + " assert isinstance(entities, tuple) and len(entities) == 2 # people_names\n", + " ent_tuples += permutations(entities[0] + entities[1], 3)\n", + " for (rel, ent_tuple) in product(relations, ent_tuples):\n", + " sentences += make_transitive(transitive_P_template, transitive_wh_QA_template, transitive_yesno_QA_template, None, \n", + " entities=list(ent_tuple), relations=rel, maybe=False, structured=True)\n", + " if maybe:\n", + " sentences += make_transitive(transitive_P_template, transitive_wh_QA_template, transitive_yesno_QA_template, None, \n", + " entities=list(ent_tuple), relations=rel, maybe=True, structured=True)\n", + " sample(sentences, 20)\n", + " print('num_sent =', len(sentences), '->', len(set(sentences)))\n", + " sentence_groups.append(sentences)" + ] + }, + { + "cell_type": "code", + "execution_count": 247, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--max_seq_length'], dest='max_seq_length', nargs=None, const=None, default=128, type=<class 'int'>, choices=None, help='The maximum total input sequence length after WordPiece tokenization. 
\nSequences longer than this will be truncated, and sequences shorter \nthan this will be padded.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreTrueAction(option_strings=['--do_train'], dest='do_train', nargs=0, const=True, default=False, type=None, choices=None, help='Whether to run training.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreTrueAction(option_strings=['--do_eval'], dest='do_eval', nargs=0, const=True, default=False, type=None, choices=None, help='Whether to run eval on the dev set.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--train_batch_size'], dest='train_batch_size', nargs=None, const=None, default=32, type=<class 'int'>, choices=None, help='Total batch size for training.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--eval_batch_size'], dest='eval_batch_size', nargs=None, const=None, default=32, type=<class 'int'>, choices=None, help='Total batch size for eval.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--learning_rate'], dest='learning_rate', nargs=None, const=None, default=3e-05, type=<class 'float'>, choices=None, help='The initial learning rate for Adam.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--num_train_epochs'], dest='num_train_epochs', nargs=None, const=None, default=3.0, type=<class 'float'>, choices=None, help='Total number of training epochs to perform.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--warmup_proportion'], dest='warmup_proportion', nargs=None, const=None, default=0.1, type=<class 'float'>, choices=None, help='Proportion of training to perform linear learning rate warmup for. E.g., 0.1 = 10%% of training.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreTrueAction(option_strings=['--no_cuda'], dest='no_cuda', nargs=0, const=True, default=False, type=None, choices=None, help='Whether not to use CUDA when available', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreTrueAction(option_strings=['--do_lower_case'], dest='do_lower_case', nargs=0, const=True, default=False, type=None, choices=None, help='Whether to lower case the input text. 
True for uncased models, False for cased models.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--seed'], dest='seed', nargs=None, const=None, default=42, type=<class 'int'>, choices=None, help='random seed for initialization', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "_StoreAction(option_strings=['--gradient_accumulation_steps'], dest='gradient_accumulation_steps', nargs=None, const=None, default=1, type=<class 'int'>, choices=None, help='Number of updates steps to accumulate before performing a backward/update pass.', metavar=None)" + ] + }, + "execution_count": 247, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Namespace(do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=0.0001, max_seq_length=128, no_cuda=False, num_train_epochs=100, seed=42, train_batch_size=32, warmup_proportion=0.1)\n" + ] + } + ], + "source": [ + "import argparse\n", + "parser = argparse.ArgumentParser()\n", + "\n", + "parser.add_argument(\"--max_seq_length\",\n", + " default=128,\n", + " type=int,\n", + " help=\"The maximum total input sequence length after WordPiece tokenization. \\n\"\n", + " \"Sequences longer than this will be truncated, and sequences shorter \\n\"\n", + " \"than this will be padded.\")\n", + "parser.add_argument(\"--do_train\",\n", + " action='store_true',\n", + " help=\"Whether to run training.\")\n", + "parser.add_argument(\"--do_eval\",\n", + " action='store_true',\n", + " help=\"Whether to run eval on the dev set.\")\n", + "parser.add_argument(\"--train_batch_size\",\n", + " default=32,\n", + " type=int,\n", + " help=\"Total batch size for training.\")\n", + "parser.add_argument(\"--eval_batch_size\",\n", + " default=32,\n", + " type=int,\n", + " help=\"Total batch size for eval.\")\n", + "parser.add_argument(\"--learning_rate\",\n", + " default=3e-5,\n", + " type=float,\n", + " help=\"The initial learning rate for Adam.\")\n", + "parser.add_argument(\"--num_train_epochs\",\n", + " default=3.0,\n", + " type=float,\n", + " help=\"Total number of training epochs to perform.\")\n", + "parser.add_argument(\"--warmup_proportion\",\n", + " default=0.1,\n", + " type=float,\n", + " help=\"Proportion of training to perform linear learning rate warmup for. \"\n", + " \"E.g., 0.1 = 10%% of training.\")\n", + "parser.add_argument(\"--no_cuda\",\n", + " action='store_true',\n", + " help=\"Whether not to use CUDA when available\")\n", + "parser.add_argument(\"--do_lower_case\",\n", + " action='store_true',\n", + " help=\"Whether to lower case the input text. 
True for uncased models, False for cased models.\")\n", + "parser.add_argument('--seed',\n", + " type=int,\n", + " default=42,\n", + " help=\"random seed for initialization\")\n", + "parser.add_argument('--gradient_accumulation_steps',\n", + " type=int,\n", + " default=1,\n", + " help=\"Number of updates steps to accumulate before performing a backward/update pass.\")\n", + "parser.add_argument(\"--dev_percent\",\n", + " default=0.5,\n", + " type=float)\n", + "# args = parser.parse_args(['--output_dir', '/home'])\n", + "args = parser.parse_args([])\n", + "args.do_lower_case = True\n", + "args.do_train = True\n", + "args.do_eval = True\n", + "args.eval_batch_size = 128\n", + "args.learning_rate = 1e-4\n", + "args.num_train_epochs = 100\n", + "print(args)" + ] + }, + { + "cell_type": "code", + "execution_count": 243, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "num_train_steps = 10800\n" + ] + } + ], + "source": [ + "child_dataset = CHILDDataset(tokenizer, sentence_groups[0], dev_percent=0.5)\n", + "train_features = child_dataset.get_train_features()\n", + "num_train_steps = int(\n", + " len(train_features) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs)\n", + "print('num_train_steps =', num_train_steps)\n", + "eval_features = child_dataset.get_dev_features()\n", + "\n", + "train_dataset = child_dataset.build_dataset(train_features)\n", + "eval_dataset = child_dataset.build_dataset(eval_features)" + ] + }, + { + "cell_type": "code", + "execution_count": 250, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "06/09/2019 10:05:44 - INFO - run_child_finetuning - device: cuda n_gpu: 1\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 250, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "n_gpu = torch.cuda.device_count()\n", + "logger.info(\"device: {} n_gpu: {}\".format(\n", + " device, n_gpu))\n", + "\n", + "args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps)\n", + "\n", + "random.seed(args.seed)\n", + "np.random.seed(args.seed)\n", + "torch.manual_seed(args.seed)\n", + "if n_gpu > 0:\n", + " torch.cuda.manual_seed_all(args.seed)\n", + "\n", + "# Prepare model\n", + "# model = BertForMaskedLM.from_pretrained(BERT_DIR)\n", + "CONFIG_NAME = 'bert_config_small.json'\n", + "config = BertConfig(os.path.join(BERT_DIR, CONFIG_NAME))\n", + "model = BertForMaskedLM(config)\n", + "_ = model.to(device)\n", + "if n_gpu > 1:\n", + " model = torch.nn.DataParallel(model)" + ] + }, + { + "cell_type": "code", + "execution_count": 252, + "metadata": {}, + "outputs": [], + "source": [ + "# Prepare optimizer\n", + "param_optimizer = list(model.named_parameters())\n", + "no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']\n", + "optimizer_grouped_parameters = [\n", + " {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},\n", + " {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}\n", + " ]\n", + "optimizer = BertAdam(optimizer_grouped_parameters,\n", + " lr=args.learning_rate,\n", + " warmup=args.warmup_proportion,\n", + " t_total=num_train_steps)" + ] + }, + { + "cell_type": "code", + "execution_count": 253, + "metadata": {}, + "outputs": [ + { + "name": 
"stderr", + "output_type": "stream", + "text": [ + "\n", + "\n", + "\n", + "\n", + "Epoch: 0%| | 0/100 [00:00 1:\n", + " loss = loss.mean() # mean() to average on multi-gpu.\n", + " if args.gradient_accumulation_steps > 1:\n", + " loss = loss / args.gradient_accumulation_steps\n", + " loss.backward()\n", + " tr_loss += loss.item()\n", + " nb_tr_examples += input_ids.size(0)\n", + " nb_tr_steps += 1\n", + " if (step + 1) % args.gradient_accumulation_steps == 0:\n", + " # modify learning rate with special warm up BERT uses\n", + " lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_steps, args.warmup_proportion)\n", + " if global_step % 1000 == 0:\n", + " print('global_step %d, lr = %f' % (global_step, lr_this_step))\n", + " for param_group in optimizer.param_groups:\n", + " param_group['lr'] = lr_this_step\n", + " optimizer.step()\n", + " optimizer.zero_grad()\n", + " global_step += 1\n", + "\n", + " if args.do_eval:\n", + " logger.info(\"Epoch %d\" % (epoch + 1))\n", + " logger.info(\"Evaluating on train set...\")\n", + " validate(model, train_dataset, device)\n", + " logger.info(\"Evaluating on valid set...\")\n", + " validate(model, eval_dataset, device)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled3.ipynb b/Untitled3.ipynb new file mode 100644 index 00000000000000..eee4c4c8357630 --- /dev/null +++ b/Untitled3.ipynb @@ -0,0 +1,804 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import itertools\n", + "from itertools import product, chain\n", + "\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/24/2019 22:16:56 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt\n" + ] + } + ], + "source": [ + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def reverse(l):\n", + " return list(reversed(l))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def mask(ent_str):\n", + " tokens = ent_str.strip().split()\n", + " if len(tokens) == 1:\n", + " return '[%s]' % tokens[0]\n", + " elif len(tokens) == 2:\n", + " assert tokens[0] == 'the', ent_str\n", + " return '%s [%s]' % (tokens[0], tokens[1])\n", + " else:\n", + " assert False, ent_str" + ] + 
}, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "A_template = \"{dt} {ent0} {rel} {dt} {ent1} {rel_suffix}\"\n", + "B_template = \"{dt} {ent} {pred}\"\n", + "\n", + "causal_templates = [[\"{A} because {B}.\"],# \"{B} so {A}.\"], \n", + " [\"{A} so {B}.\"],# \"{B} because {A}.\"]\n", + " ]\n", + "turning_templates = [[\"{A} although {B}.\"],# \"{B} but {A}.\"], \n", + " [\"{A} but {B}.\"],# \"{B} although {A}.\"]\n", + " ]" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": {}, + "outputs": [], + "source": [ + "def make_sentences(A_template, B_template, causal_templates, turning_templates,\n", + " index=-1, orig_sentence='', entities=[\"John\", \"Mary\"], entity_substitutes=None, determiner=\"\", \n", + " packed_relations=[\"rel/~rel\", \"rev_rel/~rev_rel\"], packed_relation_substitutes=None, relation_suffix=\"\",\n", + " packed_predicates=[\"pred0/~pred0\", \"pred1/~pred1\"], predicate_substitutes=None,\n", + " predicate_dichotomy=True, reverse_causal=False):\n", + " assert entities[0].lower() in tokenizer.vocab , entities[0]\n", + " assert entities[1].lower() in tokenizer.vocab , entities[1]\n", + " \n", + " relations, neg_relations = zip(*[rel.split(\"/\") for rel in packed_relations])\n", + " relations, neg_relations = list(relations), list(neg_relations)\n", + " predicates, neg_predicates = zip(*[pred.split(\"/\") for pred in packed_predicates])\n", + " predicates, neg_predicates = list(predicates), list(neg_predicates)\n", + " \n", + " As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]]\n", + " negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + neg_relations[:1], reverse(entities) + reverse(neg_relations)[:1]]]\n", + "\n", + " Bs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, predicates)]\n", + " negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, neg_predicates)]\n", + " if predicate_dichotomy:\n", + " Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, reversed(neg_predicates))]\n", + " negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred) for ent, pred in zip(entities, reversed(predicates))]\n", + "\n", + " def form_sentences(sentence_template, As, Bs):\n", + " return [\" \".join(sentence_template.format(A=A, B=B).split()) for A, B in product(As, Bs)]\n", + "\n", + " causal_sentences = []\n", + " for causal_template in causal_templates[int(reverse_causal)]:\n", + " for A, B in [(As, Bs), (negAs, negBs)]:\n", + " causal_sentences.extend(form_sentences(causal_template, A, B))\n", + "\n", + " turning_sentences = []\n", + " for turning_template in turning_templates[int(reverse_causal)]:\n", + " for A, B in [(As, negBs), (negAs, Bs)]:\n", + " turning_sentences.extend(form_sentences(turning_template, A, B))\n", + " \n", + " sentences = causal_sentences + turning_sentences\n", + " substituted_sentences = sentences\n", + " \n", + " if packed_relation_substitutes is not None:\n", + " packed_relation_substitutes = list(itertools.product(packed_relations[:1] + packed_relation_substitutes[0], \n", + " packed_relations[1:] + packed_relation_substitutes[1]))\n", + " substituted_sentences = []\n", + " for packed_sub_relations 
in packed_relation_substitutes:\n", + " sub_relations, sub_neg_relations = zip(*[rel.split(\"/\") for rel in packed_sub_relations])\n", + " substituted_sentences += [sent.replace(relations[0], sub_relations[0]).replace(relations[1], sub_relations[1])\n", + " .replace(neg_relations[0], sub_neg_relations[0]).replace(neg_relations[1], sub_neg_relations[1]) \n", + " for sent in sentences]\n", + " substituted_sentences = list(set(substituted_sentences))\n", + " \n", + " if entity_substitutes is not None:\n", + " for sub in entity_substitutes:\n", + " for ent in sub:\n", + " assert ent.lower() in tokenizer.vocab , ent + \" not in BERT vocab\"\n", + " assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes\n", + " assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6 \n", + " \n", + " entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1]))\n", + " substituted_sentences = [sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) \n", + " for sent in substituted_sentences for sub in entity_substitutes]\n", + " return causal_sentences, turning_sentences, substituted_sentences" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": {}, + "outputs": [], + "source": [ + "frames = \\\n", + "[\n", + " {\n", + " \"index\": 2,\n", + " \"orig_sentence\": \"The trophy doesn't fit into the brown suitcase because [it] is too large/small.\",\n", + " \"entities\": [\"trophy\", \"suitcase\"],\n", + " \"entity_substitutes\": [[\"ball\", \"toy\"], [\"bag\", \"box\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"doesn't fit into/can fit into\", \"doesn't hold/can hold\"],\n", + " \"packed_relation_substitutes\": [[\"can't be put into/can be put into\"], [\"doesn't have enough room for/has enough room for\"]],\n", + " \"relation_suffix\": \"\",\n", + " \"packed_predicates\": [\"is large/isn't large\", \"is small/isn't small\"],\n", + " \"predicate_dichotomy\": True,\n", + " \"reverse_causal\": False\n", + " },\n", + " {\n", + " \"index\": 4,\n", + " \"orig_sentence\": \"Joan made sure to thank Susan for all the help [she] had recieved/given.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Linda\"]],\n", + " \"determiner\": \"\",\n", + " \"packed_relations\": [\"thanked/didn't thank\", \"took good care of/didn't take good care of\"],\n", + " \"packed_relation_substitutes\": [[\"felt grateful to/didn't feel grateful to\"], [\"was appreciated by/wasn't appreciated by\"]],\n", + " \"relation_suffix\": \"\",\n", + " \"packed_predicates\": [\"had received a lot of help/hadn't received a lot of help\", \"had given a lot of help/hadn't given a lot of help\"],\n", + " \"predicate_dichotomy\": False,\n", + " \"reverse_causal\": False\n", + " },\n", + " {\n", + " \"index\": 4000,\n", + " \"orig_sentence\": \"John gave a lot of money to Susan because [he] was very rich/poor.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Linda\"]],\n", + " \"determiner\": \"\",\n", + " \"packed_relations\": [\"gave a lot of money to/didn't give a lot of money to\", \"received a lot of money from/didn't receive a lot of money from\"],\n", + " \"packed_relation_substitutes\": [[\"subsidized/didn't subsidize\"], [\"borrowed a lot of money from/didn't borrow any money from\"]],\n", + " \"relation_suffix\": \"\",\n", + " \"packed_predicates\": 
[\"was rich/wasn't rich\", \"was poor/wasn't poor\"],\n", + " \"predicate_dichotomy\": True,\n", + " \"reverse_causal\": False\n", + " },\n", + " {\n", + " \"index\": 10,\n", + " \"orig_sentence\": \"The delivery truck zoomed by the school bus because [it] was going so fast/slow.\",\n", + " \"entities\": [\"truck\", \"bus\"],\n", + " \"entity_substitutes\": [[\"car\", \"ambulance\"], [\"bicycle\", \"tram\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"overtook/couldn't overtake\", \"fell far behind/didn't fall far behind\"],\n", + " \"packed_relation_substitutes\": [[\"zoomed by/didn't pass\"], [\"was left behind/wasn't left far behind\"]],\n", + " \"relation_suffix\": \"\",\n", + " \"packed_predicates\": [\"was going fast/wasn't going fast\", \"was going slow/wasn't going slow\"],\n", + " \"predicate_dichotomy\": True,\n", + " \"reverse_causal\": False\n", + " },\n", + " {\n", + " \"index\": 12,\n", + " \"orig_sentence\": \"Frank felt vindicated/crushed when his longtime rival Bill revealed that [he] was the winner of the competition.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Linda\"]],\n", + " \"determiner\": \"\",\n", + " \"packed_relations\": [\"beat/didn't beat\", \"lost to/didn't lose to\"],\n", + " \"relation_suffix\": \"in the game\",\n", + " \"packed_predicates\": [\"was happy/wasn't happy\", \"was sad/wasn't sad\"],\n", + " \"packed_relation_substitutes\": None,\n", + " \"predicate_dichotomy\": True,\n", + " \"reverse_causal\": True\n", + " },\n", + " {\n", + " \"index\": 16,\n", + " \"orig_sentence\": \"The large ball crashed right through the table because [it] was made of steel/styrofoam.\",\n", + " \"entities\": [\"ball\", \"board\"],\n", + " \"substitutes\": [[\"bullet\", \"arrow\"], [\"shield\", \"disk\"]],\n", + " \"determiner\": \"the\",\n", + " \"relations\": [\"crashed right through\", \"failed to block\"],\n", + " \"neg_relations\": [\"didn't crash through\", \"blocked\"],\n", + " \"relation_suffix\": \"\",\n", + " \"predicates\": [\"was hard\", \"was soft\"],\n", + " \"neg_predicates\": [\"wasn't hard\", \"wasn't soft\"],\n", + " \"predicate_dichotomy\": True,\n", + " \"reverse_causal\": False\n", + " },\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": {}, + "outputs": [], + "source": [ + "causal_sentences, turning_sentences, substituted_sentences = \\\n", + " make_sentences(A_template, B_template, causal_templates, turning_templates, **frames[-1])" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['John beat Susan in the game so [John] was happy.',\n", + " 'John beat Susan in the game so [Susan] was sad.',\n", + " \"John beat Susan in the game so [John] wasn't sad.\",\n", + " \"John beat Susan in the game so [Susan] wasn't happy.\",\n", + " 'Susan lost to John in the game so [John] was happy.',\n", + " 'Susan lost to John in the game so [Susan] was sad.',\n", + " \"Susan lost to John in the game so [John] wasn't sad.\",\n", + " \"Susan lost to John in the game so [Susan] wasn't happy.\",\n", + " \"John didn't beat Susan in the game so [John] wasn't happy.\",\n", + " \"John didn't beat Susan in the game so [Susan] wasn't sad.\",\n", + " \"John didn't beat Susan in the game so [John] was sad.\",\n", + " \"John didn't beat Susan in the game so [Susan] was happy.\",\n", + " \"Susan didn't lose to John in the game so [John] wasn't happy.\",\n", + " \"Susan 
didn't lose to John in the game so [Susan] wasn't sad.\",\n", + " \"Susan didn't lose to John in the game so [John] was sad.\",\n", + " \"Susan didn't lose to John in the game so [Susan] was happy.\",\n", + " \"John beat Susan in the game but [John] wasn't happy.\",\n", + " \"John beat Susan in the game but [Susan] wasn't sad.\",\n", + " 'John beat Susan in the game but [John] was sad.',\n", + " 'John beat Susan in the game but [Susan] was happy.',\n", + " \"Susan lost to John in the game but [John] wasn't happy.\",\n", + " \"Susan lost to John in the game but [Susan] wasn't sad.\",\n", + " 'Susan lost to John in the game but [John] was sad.',\n", + " 'Susan lost to John in the game but [Susan] was happy.',\n", + " \"John didn't beat Susan in the game but [John] was happy.\",\n", + " \"John didn't beat Susan in the game but [Susan] was sad.\",\n", + " \"John didn't beat Susan in the game but [John] wasn't sad.\",\n", + " \"John didn't beat Susan in the game but [Susan] wasn't happy.\",\n", + " \"Susan didn't lose to John in the game but [John] was happy.\",\n", + " \"Susan didn't lose to John in the game but [Susan] was sad.\",\n", + " \"Susan didn't lose to John in the game but [John] wasn't sad.\",\n", + " \"Susan didn't lose to John in the game but [Susan] wasn't happy.\"]" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "['John beat Susan in the game so [John] was happy.',\n", + " 'John beat Mary in the game so [John] was happy.',\n", + " 'John beat Linda in the game so [John] was happy.',\n", + " 'David beat Susan in the game so [David] was happy.',\n", + " 'David beat Mary in the game so [David] was happy.',\n", + " 'David beat Linda in the game so [David] was happy.',\n", + " 'Michael beat Susan in the game so [Michael] was happy.',\n", + " 'Michael beat Mary in the game so [Michael] was happy.',\n", + " 'Michael beat Linda in the game so [Michael] was happy.',\n", + " 'John beat Susan in the game so [Susan] was sad.',\n", + " 'John beat Mary in the game so [Mary] was sad.',\n", + " 'John beat Linda in the game so [Linda] was sad.',\n", + " 'David beat Susan in the game so [Susan] was sad.',\n", + " 'David beat Mary in the game so [Mary] was sad.',\n", + " 'David beat Linda in the game so [Linda] was sad.',\n", + " 'Michael beat Susan in the game so [Susan] was sad.',\n", + " 'Michael beat Mary in the game so [Mary] was sad.',\n", + " 'Michael beat Linda in the game so [Linda] was sad.',\n", + " \"John beat Susan in the game so [John] wasn't sad.\",\n", + " \"John beat Mary in the game so [John] wasn't sad.\",\n", + " \"John beat Linda in the game so [John] wasn't sad.\",\n", + " \"David beat Susan in the game so [David] wasn't sad.\",\n", + " \"David beat Mary in the game so [David] wasn't sad.\",\n", + " \"David beat Linda in the game so [David] wasn't sad.\",\n", + " \"Michael beat Susan in the game so [Michael] wasn't sad.\",\n", + " \"Michael beat Mary in the game so [Michael] wasn't sad.\",\n", + " \"Michael beat Linda in the game so [Michael] wasn't sad.\",\n", + " \"John beat Susan in the game so [Susan] wasn't happy.\",\n", + " \"John beat Mary in the game so [Mary] wasn't happy.\",\n", + " \"John beat Linda in the game so [Linda] wasn't happy.\",\n", + " \"David beat Susan in the game so [Susan] wasn't happy.\",\n", + " \"David beat Mary in the game so [Mary] wasn't happy.\",\n", + " \"David beat Linda in the game so [Linda] wasn't happy.\",\n", + " \"Michael beat Susan in the game so [Susan] 
wasn't happy.\",\n", + " \"Michael beat Mary in the game so [Mary] wasn't happy.\",\n", + " \"Michael beat Linda in the game so [Linda] wasn't happy.\",\n", + " 'Susan lost to John in the game so [John] was happy.',\n", + " 'Mary lost to John in the game so [John] was happy.',\n", + " 'Linda lost to John in the game so [John] was happy.',\n", + " 'Susan lost to David in the game so [David] was happy.',\n", + " 'Mary lost to David in the game so [David] was happy.',\n", + " 'Linda lost to David in the game so [David] was happy.',\n", + " 'Susan lost to Michael in the game so [Michael] was happy.',\n", + " 'Mary lost to Michael in the game so [Michael] was happy.',\n", + " 'Linda lost to Michael in the game so [Michael] was happy.',\n", + " 'Susan lost to John in the game so [Susan] was sad.',\n", + " 'Mary lost to John in the game so [Mary] was sad.',\n", + " 'Linda lost to John in the game so [Linda] was sad.',\n", + " 'Susan lost to David in the game so [Susan] was sad.',\n", + " 'Mary lost to David in the game so [Mary] was sad.',\n", + " 'Linda lost to David in the game so [Linda] was sad.',\n", + " 'Susan lost to Michael in the game so [Susan] was sad.',\n", + " 'Mary lost to Michael in the game so [Mary] was sad.',\n", + " 'Linda lost to Michael in the game so [Linda] was sad.',\n", + " \"Susan lost to John in the game so [John] wasn't sad.\",\n", + " \"Mary lost to John in the game so [John] wasn't sad.\",\n", + " \"Linda lost to John in the game so [John] wasn't sad.\",\n", + " \"Susan lost to David in the game so [David] wasn't sad.\",\n", + " \"Mary lost to David in the game so [David] wasn't sad.\",\n", + " \"Linda lost to David in the game so [David] wasn't sad.\",\n", + " \"Susan lost to Michael in the game so [Michael] wasn't sad.\",\n", + " \"Mary lost to Michael in the game so [Michael] wasn't sad.\",\n", + " \"Linda lost to Michael in the game so [Michael] wasn't sad.\",\n", + " \"Susan lost to John in the game so [Susan] wasn't happy.\",\n", + " \"Mary lost to John in the game so [Mary] wasn't happy.\",\n", + " \"Linda lost to John in the game so [Linda] wasn't happy.\",\n", + " \"Susan lost to David in the game so [Susan] wasn't happy.\",\n", + " \"Mary lost to David in the game so [Mary] wasn't happy.\",\n", + " \"Linda lost to David in the game so [Linda] wasn't happy.\",\n", + " \"Susan lost to Michael in the game so [Susan] wasn't happy.\",\n", + " \"Mary lost to Michael in the game so [Mary] wasn't happy.\",\n", + " \"Linda lost to Michael in the game so [Linda] wasn't happy.\",\n", + " \"John didn't beat Susan in the game so [John] wasn't happy.\",\n", + " \"John didn't beat Mary in the game so [John] wasn't happy.\",\n", + " \"John didn't beat Linda in the game so [John] wasn't happy.\",\n", + " \"David didn't beat Susan in the game so [David] wasn't happy.\",\n", + " \"David didn't beat Mary in the game so [David] wasn't happy.\",\n", + " \"David didn't beat Linda in the game so [David] wasn't happy.\",\n", + " \"Michael didn't beat Susan in the game so [Michael] wasn't happy.\",\n", + " \"Michael didn't beat Mary in the game so [Michael] wasn't happy.\",\n", + " \"Michael didn't beat Linda in the game so [Michael] wasn't happy.\",\n", + " \"John didn't beat Susan in the game so [Susan] wasn't sad.\",\n", + " \"John didn't beat Mary in the game so [Mary] wasn't sad.\",\n", + " \"John didn't beat Linda in the game so [Linda] wasn't sad.\",\n", + " \"David didn't beat Susan in the game so [Susan] wasn't sad.\",\n", + " \"David didn't beat Mary in the game so [Mary] 
wasn't sad.\",\n", + " \"David didn't beat Linda in the game so [Linda] wasn't sad.\",\n", + " \"Michael didn't beat Susan in the game so [Susan] wasn't sad.\",\n", + " \"Michael didn't beat Mary in the game so [Mary] wasn't sad.\",\n", + " \"Michael didn't beat Linda in the game so [Linda] wasn't sad.\",\n", + " \"John didn't beat Susan in the game so [John] was sad.\",\n", + " \"John didn't beat Mary in the game so [John] was sad.\",\n", + " \"John didn't beat Linda in the game so [John] was sad.\",\n", + " \"David didn't beat Susan in the game so [David] was sad.\",\n", + " \"David didn't beat Mary in the game so [David] was sad.\",\n", + " \"David didn't beat Linda in the game so [David] was sad.\",\n", + " \"Michael didn't beat Susan in the game so [Michael] was sad.\",\n", + " \"Michael didn't beat Mary in the game so [Michael] was sad.\",\n", + " \"Michael didn't beat Linda in the game so [Michael] was sad.\",\n", + " \"John didn't beat Susan in the game so [Susan] was happy.\",\n", + " \"John didn't beat Mary in the game so [Mary] was happy.\",\n", + " \"John didn't beat Linda in the game so [Linda] was happy.\",\n", + " \"David didn't beat Susan in the game so [Susan] was happy.\",\n", + " \"David didn't beat Mary in the game so [Mary] was happy.\",\n", + " \"David didn't beat Linda in the game so [Linda] was happy.\",\n", + " \"Michael didn't beat Susan in the game so [Susan] was happy.\",\n", + " \"Michael didn't beat Mary in the game so [Mary] was happy.\",\n", + " \"Michael didn't beat Linda in the game so [Linda] was happy.\",\n", + " \"Susan didn't lose to John in the game so [John] wasn't happy.\",\n", + " \"Mary didn't lose to John in the game so [John] wasn't happy.\",\n", + " \"Linda didn't lose to John in the game so [John] wasn't happy.\",\n", + " \"Susan didn't lose to David in the game so [David] wasn't happy.\",\n", + " \"Mary didn't lose to David in the game so [David] wasn't happy.\",\n", + " \"Linda didn't lose to David in the game so [David] wasn't happy.\",\n", + " \"Susan didn't lose to Michael in the game so [Michael] wasn't happy.\",\n", + " \"Mary didn't lose to Michael in the game so [Michael] wasn't happy.\",\n", + " \"Linda didn't lose to Michael in the game so [Michael] wasn't happy.\",\n", + " \"Susan didn't lose to John in the game so [Susan] wasn't sad.\",\n", + " \"Mary didn't lose to John in the game so [Mary] wasn't sad.\",\n", + " \"Linda didn't lose to John in the game so [Linda] wasn't sad.\",\n", + " \"Susan didn't lose to David in the game so [Susan] wasn't sad.\",\n", + " \"Mary didn't lose to David in the game so [Mary] wasn't sad.\",\n", + " \"Linda didn't lose to David in the game so [Linda] wasn't sad.\",\n", + " \"Susan didn't lose to Michael in the game so [Susan] wasn't sad.\",\n", + " \"Mary didn't lose to Michael in the game so [Mary] wasn't sad.\",\n", + " \"Linda didn't lose to Michael in the game so [Linda] wasn't sad.\",\n", + " \"Susan didn't lose to John in the game so [John] was sad.\",\n", + " \"Mary didn't lose to John in the game so [John] was sad.\",\n", + " \"Linda didn't lose to John in the game so [John] was sad.\",\n", + " \"Susan didn't lose to David in the game so [David] was sad.\",\n", + " \"Mary didn't lose to David in the game so [David] was sad.\",\n", + " \"Linda didn't lose to David in the game so [David] was sad.\",\n", + " \"Susan didn't lose to Michael in the game so [Michael] was sad.\",\n", + " \"Mary didn't lose to Michael in the game so [Michael] was sad.\",\n", + " \"Linda didn't lose to Michael in 
the game so [Michael] was sad.\",\n", + " \"Susan didn't lose to John in the game so [Susan] was happy.\",\n", + " \"Mary didn't lose to John in the game so [Mary] was happy.\",\n", + " \"Linda didn't lose to John in the game so [Linda] was happy.\",\n", + " \"Susan didn't lose to David in the game so [Susan] was happy.\",\n", + " \"Mary didn't lose to David in the game so [Mary] was happy.\",\n", + " \"Linda didn't lose to David in the game so [Linda] was happy.\",\n", + " \"Susan didn't lose to Michael in the game so [Susan] was happy.\",\n", + " \"Mary didn't lose to Michael in the game so [Mary] was happy.\",\n", + " \"Linda didn't lose to Michael in the game so [Linda] was happy.\",\n", + " \"John beat Susan in the game but [John] wasn't happy.\",\n", + " \"John beat Mary in the game but [John] wasn't happy.\",\n", + " \"John beat Linda in the game but [John] wasn't happy.\",\n", + " \"David beat Susan in the game but [David] wasn't happy.\",\n", + " \"David beat Mary in the game but [David] wasn't happy.\",\n", + " \"David beat Linda in the game but [David] wasn't happy.\",\n", + " \"Michael beat Susan in the game but [Michael] wasn't happy.\",\n", + " \"Michael beat Mary in the game but [Michael] wasn't happy.\",\n", + " \"Michael beat Linda in the game but [Michael] wasn't happy.\",\n", + " \"John beat Susan in the game but [Susan] wasn't sad.\",\n", + " \"John beat Mary in the game but [Mary] wasn't sad.\",\n", + " \"John beat Linda in the game but [Linda] wasn't sad.\",\n", + " \"David beat Susan in the game but [Susan] wasn't sad.\",\n", + " \"David beat Mary in the game but [Mary] wasn't sad.\",\n", + " \"David beat Linda in the game but [Linda] wasn't sad.\",\n", + " \"Michael beat Susan in the game but [Susan] wasn't sad.\",\n", + " \"Michael beat Mary in the game but [Mary] wasn't sad.\",\n", + " \"Michael beat Linda in the game but [Linda] wasn't sad.\",\n", + " 'John beat Susan in the game but [John] was sad.',\n", + " 'John beat Mary in the game but [John] was sad.',\n", + " 'John beat Linda in the game but [John] was sad.',\n", + " 'David beat Susan in the game but [David] was sad.',\n", + " 'David beat Mary in the game but [David] was sad.',\n", + " 'David beat Linda in the game but [David] was sad.',\n", + " 'Michael beat Susan in the game but [Michael] was sad.',\n", + " 'Michael beat Mary in the game but [Michael] was sad.',\n", + " 'Michael beat Linda in the game but [Michael] was sad.',\n", + " 'John beat Susan in the game but [Susan] was happy.',\n", + " 'John beat Mary in the game but [Mary] was happy.',\n", + " 'John beat Linda in the game but [Linda] was happy.',\n", + " 'David beat Susan in the game but [Susan] was happy.',\n", + " 'David beat Mary in the game but [Mary] was happy.',\n", + " 'David beat Linda in the game but [Linda] was happy.',\n", + " 'Michael beat Susan in the game but [Susan] was happy.',\n", + " 'Michael beat Mary in the game but [Mary] was happy.',\n", + " 'Michael beat Linda in the game but [Linda] was happy.',\n", + " \"Susan lost to John in the game but [John] wasn't happy.\",\n", + " \"Mary lost to John in the game but [John] wasn't happy.\",\n", + " \"Linda lost to John in the game but [John] wasn't happy.\",\n", + " \"Susan lost to David in the game but [David] wasn't happy.\",\n", + " \"Mary lost to David in the game but [David] wasn't happy.\",\n", + " \"Linda lost to David in the game but [David] wasn't happy.\",\n", + " \"Susan lost to Michael in the game but [Michael] wasn't happy.\",\n", + " \"Mary lost to Michael in the game 
but [Michael] wasn't happy.\",\n", + " \"Linda lost to Michael in the game but [Michael] wasn't happy.\",\n", + " \"Susan lost to John in the game but [Susan] wasn't sad.\",\n", + " \"Mary lost to John in the game but [Mary] wasn't sad.\",\n", + " \"Linda lost to John in the game but [Linda] wasn't sad.\",\n", + " \"Susan lost to David in the game but [Susan] wasn't sad.\",\n", + " \"Mary lost to David in the game but [Mary] wasn't sad.\",\n", + " \"Linda lost to David in the game but [Linda] wasn't sad.\",\n", + " \"Susan lost to Michael in the game but [Susan] wasn't sad.\",\n", + " \"Mary lost to Michael in the game but [Mary] wasn't sad.\",\n", + " \"Linda lost to Michael in the game but [Linda] wasn't sad.\",\n", + " 'Susan lost to John in the game but [John] was sad.',\n", + " 'Mary lost to John in the game but [John] was sad.',\n", + " 'Linda lost to John in the game but [John] was sad.',\n", + " 'Susan lost to David in the game but [David] was sad.',\n", + " 'Mary lost to David in the game but [David] was sad.',\n", + " 'Linda lost to David in the game but [David] was sad.',\n", + " 'Susan lost to Michael in the game but [Michael] was sad.',\n", + " 'Mary lost to Michael in the game but [Michael] was sad.',\n", + " 'Linda lost to Michael in the game but [Michael] was sad.',\n", + " 'Susan lost to John in the game but [Susan] was happy.',\n", + " 'Mary lost to John in the game but [Mary] was happy.',\n", + " 'Linda lost to John in the game but [Linda] was happy.',\n", + " 'Susan lost to David in the game but [Susan] was happy.',\n", + " 'Mary lost to David in the game but [Mary] was happy.',\n", + " 'Linda lost to David in the game but [Linda] was happy.',\n", + " 'Susan lost to Michael in the game but [Susan] was happy.',\n", + " 'Mary lost to Michael in the game but [Mary] was happy.',\n", + " 'Linda lost to Michael in the game but [Linda] was happy.',\n", + " \"John didn't beat Susan in the game but [John] was happy.\",\n", + " \"John didn't beat Mary in the game but [John] was happy.\",\n", + " \"John didn't beat Linda in the game but [John] was happy.\",\n", + " \"David didn't beat Susan in the game but [David] was happy.\",\n", + " \"David didn't beat Mary in the game but [David] was happy.\",\n", + " \"David didn't beat Linda in the game but [David] was happy.\",\n", + " \"Michael didn't beat Susan in the game but [Michael] was happy.\",\n", + " \"Michael didn't beat Mary in the game but [Michael] was happy.\",\n", + " \"Michael didn't beat Linda in the game but [Michael] was happy.\",\n", + " \"John didn't beat Susan in the game but [Susan] was sad.\",\n", + " \"John didn't beat Mary in the game but [Mary] was sad.\",\n", + " \"John didn't beat Linda in the game but [Linda] was sad.\",\n", + " \"David didn't beat Susan in the game but [Susan] was sad.\",\n", + " \"David didn't beat Mary in the game but [Mary] was sad.\",\n", + " \"David didn't beat Linda in the game but [Linda] was sad.\",\n", + " \"Michael didn't beat Susan in the game but [Susan] was sad.\",\n", + " \"Michael didn't beat Mary in the game but [Mary] was sad.\",\n", + " \"Michael didn't beat Linda in the game but [Linda] was sad.\",\n", + " \"John didn't beat Susan in the game but [John] wasn't sad.\",\n", + " \"John didn't beat Mary in the game but [John] wasn't sad.\",\n", + " \"John didn't beat Linda in the game but [John] wasn't sad.\",\n", + " \"David didn't beat Susan in the game but [David] wasn't sad.\",\n", + " \"David didn't beat Mary in the game but [David] wasn't sad.\",\n", + " \"David didn't 
beat Linda in the game but [David] wasn't sad.\",\n", + " \"Michael didn't beat Susan in the game but [Michael] wasn't sad.\",\n", + " \"Michael didn't beat Mary in the game but [Michael] wasn't sad.\",\n", + " \"Michael didn't beat Linda in the game but [Michael] wasn't sad.\",\n", + " \"John didn't beat Susan in the game but [Susan] wasn't happy.\",\n", + " \"John didn't beat Mary in the game but [Mary] wasn't happy.\",\n", + " \"John didn't beat Linda in the game but [Linda] wasn't happy.\",\n", + " \"David didn't beat Susan in the game but [Susan] wasn't happy.\",\n", + " \"David didn't beat Mary in the game but [Mary] wasn't happy.\",\n", + " \"David didn't beat Linda in the game but [Linda] wasn't happy.\",\n", + " \"Michael didn't beat Susan in the game but [Susan] wasn't happy.\",\n", + " \"Michael didn't beat Mary in the game but [Mary] wasn't happy.\",\n", + " \"Michael didn't beat Linda in the game but [Linda] wasn't happy.\",\n", + " \"Susan didn't lose to John in the game but [John] was happy.\",\n", + " \"Mary didn't lose to John in the game but [John] was happy.\",\n", + " \"Linda didn't lose to John in the game but [John] was happy.\",\n", + " \"Susan didn't lose to David in the game but [David] was happy.\",\n", + " \"Mary didn't lose to David in the game but [David] was happy.\",\n", + " \"Linda didn't lose to David in the game but [David] was happy.\",\n", + " \"Susan didn't lose to Michael in the game but [Michael] was happy.\",\n", + " \"Mary didn't lose to Michael in the game but [Michael] was happy.\",\n", + " \"Linda didn't lose to Michael in the game but [Michael] was happy.\",\n", + " \"Susan didn't lose to John in the game but [Susan] was sad.\",\n", + " \"Mary didn't lose to John in the game but [Mary] was sad.\",\n", + " \"Linda didn't lose to John in the game but [Linda] was sad.\",\n", + " \"Susan didn't lose to David in the game but [Susan] was sad.\",\n", + " \"Mary didn't lose to David in the game but [Mary] was sad.\",\n", + " \"Linda didn't lose to David in the game but [Linda] was sad.\",\n", + " \"Susan didn't lose to Michael in the game but [Susan] was sad.\",\n", + " \"Mary didn't lose to Michael in the game but [Mary] was sad.\",\n", + " \"Linda didn't lose to Michael in the game but [Linda] was sad.\",\n", + " \"Susan didn't lose to John in the game but [John] wasn't sad.\",\n", + " \"Mary didn't lose to John in the game but [John] wasn't sad.\",\n", + " \"Linda didn't lose to John in the game but [John] wasn't sad.\",\n", + " \"Susan didn't lose to David in the game but [David] wasn't sad.\",\n", + " \"Mary didn't lose to David in the game but [David] wasn't sad.\",\n", + " \"Linda didn't lose to David in the game but [David] wasn't sad.\",\n", + " \"Susan didn't lose to Michael in the game but [Michael] wasn't sad.\",\n", + " \"Mary didn't lose to Michael in the game but [Michael] wasn't sad.\",\n", + " \"Linda didn't lose to Michael in the game but [Michael] wasn't sad.\",\n", + " \"Susan didn't lose to John in the game but [Susan] wasn't happy.\",\n", + " \"Mary didn't lose to John in the game but [Mary] wasn't happy.\",\n", + " \"Linda didn't lose to John in the game but [Linda] wasn't happy.\",\n", + " \"Susan didn't lose to David in the game but [Susan] wasn't happy.\",\n", + " \"Mary didn't lose to David in the game but [Mary] wasn't happy.\",\n", + " \"Linda didn't lose to David in the game but [Linda] wasn't happy.\",\n", + " \"Susan didn't lose to Michael in the game but [Susan] wasn't happy.\",\n", + " \"Mary didn't lose to Michael in 
the game but [Mary] wasn't happy.\",\n", + " \"Linda didn't lose to Michael in the game but [Linda] wasn't happy.\"]" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "causal_sentences\n", + "turning_sentences\n", + "# substituted_sentences" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [], + "source": [ + "examples = [(2,\n", + " \"The trophy doesn't fit into the brown suitcase because [it] is too large.\",\n", + " 'fit into:large/small'),\n", + " (4,\n", + " 'Joan made sure to thank Susan for all the help [she] had recieved.',\n", + " 'thank:receive/give'),\n", + " (10,\n", + " 'The delivery truck zoomed by the school bus because [it] was going so fast.',\n", + " 'zoom by:fast/slow'),\n", + " (12,\n", + " 'Frank felt vindicated when his longtime rival Bill revealed that [he] was the winner of the competition.',\n", + " 'vindicated/crushed:be the winner'),\n", + " (16,\n", + " 'The large ball crashed right through the table because [it] was made of steel.',\n", + " 'crash through:[hard]/[soft]'),\n", + " (18,\n", + " \"John couldn't see the stage with Billy in front of him because [he] is so short.\",\n", + " '[block]:short/tall'),\n", + " (20,\n", + " 'Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.',\n", + " 'down to:top/bottom'),\n", + " (22,\n", + " 'Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.',\n", + " 'beat:good/bad'),\n", + " (26,\n", + " \"Sam's drawing was hung just above Tina's and [it] did look much better with another one below it.\",\n", + " 'above/below'),\n", + " (28,\n", + " 'Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.',\n", + " 'better/worse:study hard'),\n", + " (30,\n", + " 'The firemen arrived after the police because [they] were coming from so far away.',\n", + " 'after/before:far away'),\n", + " (32,\n", + " \"Frank was upset with Tom because the toaster [he] had bought from him didn't work.\",\n", + " 'be upset with:buy from not work/sell not work'),\n", + " (36,\n", + " 'The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first.',\n", + " 'above/below:moved first'),\n", + " (38,\n", + " 'Pete envies Martin although [he] is very successful.',\n", + " 'although/because'),\n", + " (42,\n", + " 'I poured water from the bottle into the cup until [it] was empty.',\n", + " 'pour:empty/full'),\n", + " (46,\n", + " \"Sid explained his theory to Mark but [he] couldn't convince him.\",\n", + " 'explain:convince/understand'),\n", + " (48,\n", + " \"Susan knew that Ann's son had been in a car accident, so [she] told her about it.\",\n", + " '?know tell:so/because'),\n", + " (50,\n", + " \"Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.\",\n", + " 'beat:younger/older'),\n", + " (64,\n", + " 'In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.',\n", + " 'but/and'),\n", + " (68,\n", + " 'Ann asked Mary what time the library closes, because [she] had forgotten.',\n", + " 'because/but'),\n", + " (84,\n", + " 'If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.',\n", + " 'fool:get/lose'),\n", + " (92,\n", + " 'Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.',\n", + " '?stop normal/stop abnormal:strange'),\n", + " (98,\n", + " 
\"I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.\",\n", + " 'put ... into filled with ... :get in/get out'),\n", + " (100,\n", + " 'The dog chased the cat, which ran up a tree. [It] waited at the bottom.',\n", + " 'up:at the bottom/at the top'),\n", + " (106,\n", + " 'John was doing research in the library when he heard a man humming and whistling. [He] was very annoyed.',\n", + " 'hear ... humming and whistling:annoyed/annoying'),\n", + " (108,\n", + " 'John was jogging through the park when he saw a man juggling watermelons. [He] was very impressed.',\n", + " 'see ... juggling watermelons:impressed/impressive'),\n", + " (132,\n", + " 'Jane knocked on the door, and Susan answered it. [She] invited her to come out.',\n", + " 'visit:invite come out/invite come in'),\n", + " (150,\n", + " 'Jackson was greatly influenced by Arnold, though [he] lived two centuries later.',\n", + " 'influence:later/earlier'),\n", + " (160,\n", + " 'The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.',\n", + " 'change:hard/easy'),\n", + " (166,\n", + " 'Fred is the only man still alive who remembers my great-grandfather. [He] is a remarkable man.',\n", + " 'alive:is/was'),\n", + " (170,\n", + " \"In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.\",\n", + " 'better equipped and large:defeated/victorious'),\n", + " (186,\n", + " 'When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority.',\n", + " 'be full of:minority/majority'),\n", + " (188,\n", + " 'Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .',\n", + " 'like over:more/fewer'),\n", + " (190,\n", + " 'We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .',\n", + " 'place on all:not enough/too many'),\n", + " (196,\n", + " \"Steve follows Fred's example in everything. [He] admires him hugely.\",\n", + " 'follow:admire/influence'),\n", + " (198,\n", + " \"The table won't fit through the doorway because [it] is too wide.\",\n", + " 'fit through:wide/narrow'),\n", + " (200,\n", + " 'Grace was happy to trade me her sweater for my jacket. 
She thinks [it] looks dowdy on her.',\n", + " 'trade:dowdy/great'),\n", + " (202,\n", + " 'John hired Bill to take care of [him] .',\n", + " 'hire/hire oneself to:take care of'),\n", + " (204,\n", + " 'John promised Bill to leave, so an hour later [he] left.',\n", + " 'promise/order'),\n", + " (210,\n", + " \"Jane knocked on Susan's door but [she] did not get an answer.\",\n", + " 'knock:get an answer/answer'),\n", + " (212,\n", + " 'Joe paid the detective after [he] received the final report on the case.',\n", + " 'pay:receive/deliver'),\n", + " (226,\n", + " 'Bill passed the half-empty plate to John because [he] was full.',\n", + " 'pass the plate:full/hungry'),\n", + " (252,\n", + " 'George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.',\n", + " 'even though/because/not'),\n", + " (255,\n", + " \"Jane gave Joan candy because [she] wasn't hungry.\",\n", + " 'give:not hungry/hungry'),\n", + " (259,\n", + " 'James asked Robert for a favor but [he] was refused.',\n", + " 'ask for a favor:refuse/be refused`'),\n", + " (261,\n", + " 'Kirilov ceded the presidency to Shatov because [he] was less popular.',\n", + " 'cede:less popular/more popular'),\n", + " (263,\n", + " 'Emma did not pass the ball to Janie although [she] saw that she was open.',\n", + " 'not pass although:see open/open')]" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "47" + ] + }, + "execution_count": 77, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(examples)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled_likunlin-Copy1.ipynb b/Untitled_likunlin-Copy1.ipynb new file mode 100644 index 00000000000000..a48277551d3723 --- /dev/null +++ b/Untitled_likunlin-Copy1.ipynb @@ -0,0 +1,827 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/__init__.py\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *\n", + "\n", + "import pytorch_pretrained_bert\n", + "print(pytorch_pretrained_bert.__file__)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/03/2019 16:37:32 - INFO - 
pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n", + "01/03/2019 16:37:32 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "01/03/2019 16:37:32 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = False\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "def convert_text_to_examples(text):\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line)\n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1)\n", + " text_b = m.group(2)\n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a)\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " input_type_ids = []\n", + " if append_special_tokens:\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(1)\n", + "\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens)\n", + " input_mask = [1] * len(input_ids)\n", + "\n", + " if ex_index < 5:\n", + "# 
logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n", + "# logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,\n", + " tokens=tokens,\n", + " input_ids=input_ids,\n", + " input_mask=input_mask,\n", + " input_type_ids=input_type_ids))\n", + " return features\n", + "\n", + "def copy_and_mask_feature(feature, masked_tokens=None):\n", + " import copy\n", + " tokens = feature.tokens\n", + " masked_positions = [tokens.index(t) for t in masked_tokens if t in tokens] \\\n", + " if masked_tokens is not None else range(len(tokens))\n", + " assert len(masked_positions) > 0\n", + " masked_feature_copies = []\n", + " for masked_pos in masked_positions:\n", + " feature_copy = copy.deepcopy(feature)\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, masked_positions" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20):\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 'yellow_1'\n", + " elif gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " \n", + " avg_gap = 0.\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = 
tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " gap = math.log(top_prob) - math.log(prob_)\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " print_token(tokens[i], suggestion, gap)\n", + " avg_gap += gap\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print(avg_gap)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=False, show_firstk_probs=20):\n", + " if text[0] in analyzed_cache:\n", + " features, mlm_probs = analyzed_cache[text[0]]\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " tokens = features[0].tokens\n", + " else:\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, masked_positions = copy_and_mask_feature(features[0], masked_tokens=masked_tokens)\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long)\n", + " input_ids = input_ids.to(device)\n", + " input_type_ids = input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "\n", + " tokens = features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size()\n", + " assert bsz == len(masked_positions)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(masked_positions), vocab_size)\n", + " for i, pos in enumerate(masked_positions):\n", + " reduced_mlm_probs[0, i] = mlm_probs[i, pos]\n", + " mlm_probs = reduced_mlm_probs\n", + " tokens = [tokens[i] for i in masked_positions]\n", + " \n", + " analyzed_cache[text[0]] = (features, mlm_probs)\n", + " \n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs)\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " return top_pairs" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/03/2019 17:13:21 - INFO - examples.extract_features - tokens: [CLS] what ingredients account for the marvelous function of a dream ? [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 3 | . 
1 | the 1 | , 1 | ) 1 | \" \n", + " 35 | what \t* 35 | what 25 | do 9 | can 7 | could 5 | would \n", + " 0 | ingredients \t 51 | could 23 | would 13 | can 8 | might 2 | may \n", + " 0 | account \t 32 | were 26 | are 7 | remained 6 | existed 6 | exist \n", + " 100 | for \t*100 | for 0 | to 0 | of 0 | up 0 | all \n", + " 98 | the \t* 98 | the 2 | this 0 | a 0 | that 0 | such \n", + " 0 | marvelous \t 5 | biological 5 | normal 4 | cognitive 2 | specific 2 | physiological\n", + " 0 | function \t 21 | ##ness 8 | beauty 5 | quality 5 | nature 4 | power \n", + " 91 | of \t* 91 | of 8 | in 0 | within 0 | as 0 | during \n", + " 14 | a \t 55 | the 16 | this * 14 | a 4 | my 3 | his \n", + " 0 | dream \t 3 | heart 3 | plant 3 | soul 2 | brain 2 | body \n", + " 98 | ? \t* 98 | ? 2 | . 0 | ; 0 | ! 0 | | \n", + " 0 | [SEP] \t 13 | what 12 | \" 7 | they 4 | and 4 | ' \n", + "\u001b[38;5;15m\u001b[48;5;0mwhat \u001b[0m\u001b[38;5;196m\u001b[48;5;0mingredients\u001b[0m\u001b[38;5;196m\u001b[48;5;0m \u001b[0m\u001b[38;5;226m\u001b[48;5;0maccount\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\u001b[38;5;15m\u001b[48;5;0mfor \u001b[0m\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\u001b[38;5;214m\u001b[48;5;0mmarvelous\u001b[0m\u001b[38;5;214m\u001b[48;5;0m \u001b[0m\u001b[38;5;214m\u001b[48;5;0mfunction\u001b[0m\u001b[38;5;214m\u001b[48;5;0m \u001b[0m\u001b[38;5;15m\u001b[48;5;0mof \u001b[0m\u001b[38;5;226m\u001b[48;5;0ma\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\u001b[38;5;226m\u001b[48;5;0mdream\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\u001b[38;5;15m\u001b[48;5;0m? \u001b[0m\n", + "3.421217077676471\n" + ] + } + ], + "source": [ + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "text = [\"What ingredients account for the marvelous function of a dream?\"]\n", + "# text = [\"Last week I went to the theatre. I had a very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "# text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "analyze_text(text, show_firstk_probs=100)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/03/2019 17:10:45 - INFO - examples.extract_features - tokens: [CLS] the trophy doesn ' t fit into the brown suitcase because the [MASK] is too large . 
[SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 2 | . 1 | ) 1 | the 1 | , 1 | \" \n", + " 100 | the \t*100 | the 0 | his 0 | a 0 | its 0 | her \n", + " 97 | trophy \t* 97 | trophy 0 | cup 0 | prize 0 | trophies 0 | competition \n", + " 100 | doesn \t*100 | doesn 0 | can 0 | does 0 | won 0 | didn \n", + " 100 | ' \t*100 | ' 0 | t 0 | \" 0 | = 0 | ` \n", + " 100 | t \t*100 | t 0 | not 0 | s 0 | n 0 | to \n", + " 100 | fit \t*100 | fit 0 | fits 0 | sit 0 | get 0 | fitting \n", + " 100 | into \t*100 | into 0 | in 0 | inside 0 | onto 0 | within \n", + " 100 | the \t*100 | the 0 | her 0 | his 0 | a 0 | my \n", + " 100 | brown \t*100 | brown 0 | black 0 | green 0 | blue 0 | plastic \n", + " 95 | suitcase \t* 95 | suitcase 3 | bag 1 | luggage 0 | backpack 0 | trunk \n", + " 100 | because \t*100 | because 0 | as 0 | since 0 | due 0 | . \n", + " 100 | the \t*100 | the 0 | its 0 | his 0 | it 0 | her \n", + " 0 | [MASK] \t 21 | suitcase 19 | bag 6 | box 2 | luggage 2 | case \n", + " 99 | is \t* 99 | is 1 | was 0 | being 0 | has 0 | it \n", + " 100 | too \t*100 | too 0 | very 0 | extra 0 | overly 0 | more \n", + " 87 | large \t* 87 | large 11 | big 1 | small 1 | huge 0 | larger \n", + " 100 | . \t*100 | . 0 | ; 0 | , 0 | ! 0 | ' \n", + " 0 | [SEP] \t 35 | . 8 | ) 5 | , 4 | ( 3 | it \n" + ] + } + ], + "source": [ + "text = [\"The trophy doesn't fit into the brown suitcase because the _ is too large.\"]\n", + "# text = [\"Mary beat John in the match because _ was very strong.\"]\n", + "features = convert_examples_to_features(convert_text_to_examples(text), tokenizer, print_info=False)\n", + "input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long).to(device)\n", + "input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long).to(device)\n", + "mlm_logits, _ = model(input_ids, input_type_ids)\n", + "mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "tokens = features[0].tokens\n", + "top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=100)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.',\n", + " 'Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. 
_ left then _ left.\",\n", + " \"John came after Mary. They left in reverse order. _ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare\n", + " \"Though John is tall, Tom is taller than John. So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . \",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. _ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "config" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + " if a not in tokenizer.vocab:\n", + " ce\n", + " print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(2, 'fit into:large/small', False),\n", + " (4, 'thank:receive/give', False),\n", + " (6, 'call:successful available', True),\n", + " (8, 'ask:repeat answer', False),\n", + " (10, 'zoom by:fast/slow', False),\n", + " (12, 'vindicated/crushed:be the winner', False),\n", + " (14, 
'lift:weak heavy', False),\n", + " (16, 'crash through:[hard]/[soft]', False),\n", + " (18, '[block]:short/tall', False),\n", + " (20, 'down to:top/bottom', False),\n", + " (22, 'beat:good/bad', False),\n", + " (24, 'roll off:anchored level', False),\n", + " (26, 'above/below', False),\n", + " (28, 'better/worse:study hard', False),\n", + " (30, 'after/before:far away', False),\n", + " (32, 'be upset with:buy from not work/sell not work', True),\n", + " (34, '?yell at comfort:upset', False),\n", + " (36, 'above/below:moved first', False),\n", + " (38, 'although/because', False),\n", + " (40, 'bully:punish rescue', False),\n", + " (42, 'pour:empty/full', False),\n", + " (44, 'know:nosy indiscreet', False),\n", + " (46, 'explain:convince/understand', True),\n", + " (48, '?know tell:so/because', True),\n", + " (50, 'beat:younger/older', False),\n", + " (56, 'clog:cleaned removed', True),\n", + " (58, '?immediately follow:short delayed', False),\n", + " (60, '?between:see see around', True),\n", + " (64, 'but/and', False),\n", + " (66, 'clean:put in the trash put in the drawer', False),\n", + " (68, 'because/but', False),\n", + " (70, 'out of:handy lighter', False),\n", + " (72, 'put:tall high', False),\n", + " (74, 'show:good famous', True),\n", + " (76, 'pay for:generous grateful', False),\n", + " (78, 'but', False),\n", + " (80, 'if', False),\n", + " (82, 'if', False),\n", + " (84, 'fool:get/lose', False),\n", + " (88, 'wait:impatient cautious', False),\n", + " (90, 'give birth:woman baby', True),\n", + " (92, '?stop normal/stop abnormal:strange', False),\n", + " (96, 'eat:hungry tasty', False),\n", + " (98, 'put ... into filled with ... :get in/get out', False),\n", + " (100, 'up:at the bottom/at the top', False),\n", + " (102, 'crash through:removed repaired', False),\n", + " (104, 'stab:taken to the police station taken to the hospital', False),\n", + " (106, 'hear ... humming and whistling:annoyed/annoying', True),\n", + " (108, 'see ... 
juggling watermelons:impressed/impressive', True),\n", + " (114, 'tell lies: truthful skeptical', True),\n", + " (130, 'but:disappointed', True),\n", + " (132, 'visit:invite come out/invite come in', True),\n", + " (134, 'take classes from:eager known to speak it fluently', False),\n", + " (138, 'cover:out gone', True),\n", + " (144, 'tuck:work sleep', True),\n", + " (150, 'influence:later/earlier', False),\n", + " (152, 'can not cut:thick small', False),\n", + " (154, 'attack:kill guard', False),\n", + " (156, 'attack:bold nervous', False),\n", + " (160, 'change:hard:easy', False),\n", + " (166, 'alive:is/was', False),\n", + " (168, 'infant:twelve years old twelve months old', False),\n", + " (170, 'better equipped and large:defeated/victorious', False),\n", + " (178, 'interview:persistent cooperative', False),\n", + " (186, 'be full of:minority/majority', False),\n", + " (188, 'like over:more/fewer', False),\n", + " (190, 'place on all:not enough/too many', True),\n", + " (192, 'stick:leave have', True),\n", + " (196, 'follow:admire/influence', True),\n", + " (198, 'fit through:wide/narrow', False),\n", + " (200, 'trade:dowdy/great', False),\n", + " (202, 'hire/hire oneself to:take care of', True),\n", + " (204, 'promise/order', False),\n", + " (208, 'mother:education place', True),\n", + " (210, 'knock:get an answer/answer', True),\n", + " (212, 'pay:receive/deliver', False),\n", + " (218, '?', False),\n", + " (220, 'say check:move take', False),\n", + " (222, '?', False),\n", + " (224, 'give a life:drive alone walk', False),\n", + " (226, 'pass the plate:full/hungry', False),\n", + " (228, 'pass:turn over turn next', False),\n", + " (232, 'stretch pat', True),\n", + " (234, 'accept share', False),\n", + " (236, 'speak:break silence break concentration', False),\n", + " (240, 'carry:leg ache leg dangle', True),\n", + " (242, 'carry:in arms in bassinet', False),\n", + " (244, 'hold:against chest against will', True),\n", + " (250, 'stop', False),\n", + " (252, 'even though/because/not', False),\n", + " (255, 'give:not hungry/hungry', False),\n", + " (259, 'ask for a favor:refuse/be refused`', False),\n", + " (261, 'cede:less popular/more popular', False),\n", + " (263, 'not pass although:see open/open', True),\n", + " (271, 'suspect regret', True)]" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "[(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in e['sentence'] 
for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABREAAASVCAYAAAA7VJllAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzsvXmYa1d1Jb6OVKNqnqveZL/BfvbzCMaGQMAQMJN/QIcfQxgaks4ADSSdkHRCOiS4M3STTkhCIAmkm8YEMCEMJsFMZjI4BBOwjcE27/lNfnNVvRoklUqz7uk/Vm2fI5WqSle6UklVZ33f/apKJd17de+5++yz9l57K601HBwcHBwcHBwcHBwcHBwcHBwcHBzWQmizT8DBwcHBwcHBwcHBwcHBwcHBwcGhueFIRAcHBwcHBwcHBwcHBwcHBwcHB4d14UhEBwcHBwcHBwcHBwcHBwcHBwcHh3XhSEQHBwcHBwcHBwcHBwcHBwcHBweHdeFIRAcHBwcHBwcHBwcHBwcHBwcHB4d14UhEBwcHBwcHBwcHBwcHBwcHBwcHh3XR1CSiUuo2pZS2trbNPieBUuqsdV7/utnn4+DgUD84W+Tg4NAMcLbIwcGhWeDskYODQ6vB2a1g0NQkooX/uLIV7BeVUgeUUn+nlDqmlEoppRJKqQeVUn+ilJq03ieD5cB6B1FKdSul/uvKPqJKqaWVff+jUuqFJW//1ZVzmgvoOzo4ODQ/nC1ycHBoBjhb5ODg0Cxw9sjBwaHV4OxWDWga5nU9aK0/VvqaUuqVAD4KYBnAxwA8AiAM4EkA3grglQAur/QYKyz0NwHcAOAfAXwIgAZwAMAtAH4OwJetc7pz5XN/XM13cnBwaD04W+Tg4NAMcLbIwcGhWeDskYODQ6vB2a3a0BIkYimUUteDN/ZHAF6otZ4v+f/vAHiHz93+BwBPBfDrWuv3ljnm5OqPODg4bGc4W+Tg4NAMcLbIwcGhWeDskYODQ6vB2S1/aEkSEcAfguf+mtIbDABa60UAv+Nzn5KK+q1y/9RaT/vcn4ODw9aHs0UODg7NAGeLHBwcmgXOHjk4OLQanN3ygVapifgElFLdAF4A4F6t9bEAd31y5ecblFLhAPfr4OCwBeFskYODQzPA2SIHB4dmgbNHDg4OrQZnt/yj5UhEkNHtAPBQwPv9HIAfA/gNAGdXil3+F6XUwaAO8IpXQL/3vdCnTkEXCtCA29zmtmbcPA86kYCenoY+fhz6V37lXe8CgPe/H/o974EG0bK2CE1wjd3mNrf53971Ltqij30M+qUvhQbRsrbozjuhv/Ut6NlZaK03//q6zW1uW3vL5aCTSeholM/s299Oe/Tnfw4Ng5a0R3/2Z9D/8A/Q990H/ZOfcK02Ows9Pw8di0EvLUEvL0OnUtCZDHQ+T18RTXBf3Oa27b55HvSPfgT9lrdAozq0pN3aTLSinLl/5Wc8yJ1qrTNKqWcB+E0ArwHw6pUNSql7AfyC1vp4Lce4/nrge98DFheBpz0NuOIKYHIS6Oqq+fQdHByqhNZAOg0sL5stleLrnsf/JRJ876WXAjfe+MRHW9YWOTg4tCY8jz8//Wn6EStoWVv0pCcBR44ADz0EjI0B+/YB/f0bf87BwaG+KBSAbBbI5cxPvbI8D4WA9nZjj/btK/poS9qj5z4XOHqU3zOZBDIZYGkJiET4XTs6zHtDIUCp4r9Lt3C4+G/7/Q4ODsEhmwXm5oCPfQwYHq56Ny1ptzYTrUgiys3tC3rHWusogN8H8PtKqTEATwPw8wBeDuDzSqknaa0z1e7/t34L+PjH6Szffz8QjZJE3LULmJgAenvdJOPgUG9ks8WE4fKycYTDYaCnh89joUDysKODDiQA3Hwz0GcsT8vaIgcHh9ZDOg3EV6zOzp3Af/kvT/yrZW3Rnj20qefP0yd68EFgaop+USQSyFdwcHDYAJ5XTBZms8YvUoo+UCRi/KG2NiCfB86d43tuuaVody1pj667jsRhOk37CvDvXI4bwO/f2cnvL4Sq/VOuoy6TC6XU+iSjbA4ODpWhUODzmssBd9zBn29+c9W7a0m7tZloRRLxGIAsgOvqeRCt9UUAnwdv7scAvA7ATQDurXafXV3Aa1/LifjIEUa42tuZ9XThAomL0VFG4dta8c44ODQZCgVGlBMJQxiKM6gUn8XRURKHPT18RuNxYGaGzmN7O/fR08PP9BVPLS1rixwcHFoHnkc/IZOhmgFgUFLsElrYFoVCDKDu3EnicHqa/tDCAsnEiQmn1nBwCBJakwC0CcN83vy/rY3PnGTftbWtTnDQGrj3Xi7gga3hG4XDwNVXM5Bx+jRw1VVcjwlRkc3SBmeztFsdHYZUlOujlNmESPS84q1Q4HXPrEEZbEQyuqxGh+0Orc2zqBTwjW8AZ84AL3vZqqxoP2hJu7WZaDmqSmudUkrdDeBFSqn9DUoB/R54k3fWuqPubg7yL3wBePxxYGCAk5DWfADm5gyRODjonGcHh0qhNRfapbJkQVcXnyshDCORYkdseRk4fpyf6ezkAnZhgY70wEC547W2LXJwcGh+5PNmof6P/8hsPQDYu9e8p9VtUWenITMOHCBxeOYMF/ILC1RsjIwUywkdHBwqQz5fnGVYKkvu6ODaRLIMK8mGO3IEOHECGB9f/b9Wtkd9fbRBjz3GLMvdu2mfOjoMAVhKvqZSJFrlfYDJ4gRMJqdkItp+ZzmCUX7P5/lzrazG9UjGcNgRjQ5bE+ITeR6fq6NHgW9/m5nEz3lO9fttZbu1WWg5EnEFt4EddO5QSr1Ia71g/1MpNQjgHVrrd1S6Q6XU9QAuaK1nSl5XAF688udPajrrFUQiwIteBHz5y8x4am8nYTEwwElpepoLhXicE/vAACc2NyE4OBhkMsWEYTJpHLe2NhKFQ0PMcolE1s7uTaX4HCYSfBZ37uT7T53i/i69dN3F621oYVvk4ODQvBCZTjgMfPe7jLYfOAAcPlz27behhW1RTw8XB8vLxm7PznJ7/HHWkh4Z4eaUGg4O5eF5q+sYlsqSe3pMlmG4il6hc3PMiJ6YAHbsWPNtt6FF7dHUFNdfFy7QDg0PG8JOpNyFAt+bz5tNamdLdqJkKGpt3i8QMlEIv/VsmsikS0nGUrKxFKXy6bWk1A4OrQCpX5/LcdxGIuRKPvEJ1lN+9asD4UluQ4varc1AS7piWuv7lVJvBHA7gMdW0kEfARAG01BfDeAigNKb/Bal1AJW42MAngfgj5VSXwLwbwDmAIyBevWnAvik1jqwjj29vcALXgDcfTcwP89JZ2yMmVJDQ5zAFhZIIqbTfE9/PwlF50A7bDfk86tlySK/kclkbMxkGXZ2brzPTIYL1FiMDtXUFJ3FbJaLVq1JIK6XDbwVbJGDg0NzQeTLnkff4MIF4EMfAi6/nPburrtWf6bVbZFS9Ivicdr33l7Km/v7SVosLJiakEND3KohQBwctgq0NpmF2Sw3m6wSWbJkGEpt51qQTgPf/Cb3dfPNwA9+sNa5ta49UorBmkSCwW
QpdSOEmxCJnmeIRcAQerkcy1VJyaquLm7hsMkstElBOaadqWiTe5J1GA6vfw/XIhnlvEqJTIFrCuPQ7MjlaHu0NgR9JgN85CMc3697XVF5l6rRynZrM9CydJTW+hNKqQcAvB3ASwC8GUABwGMA3r+yleI31tjdfQA+DaAbwPNX3jcGIAngUQC/CuADQZ4/wOzC5z6XE/L8PIkQrfkg7NnDyPvyMiey7m7+vbjI/w8O8jUHh60GrUkY2lmGIucDOO4HBw1h2N3tz8HJ5YCLF/ksKUU5zugoHaV0mgSiUpQLVkJGbgVb5ODg0Bywa2VJEPHP/oz+wa/+KvDBD6792Va3RW1t/M5S/7GzkyRiJEJbH4txPshkmIEwPMy5wGXTOGwHlNYxlPrOgCGY7CzDoIkfzwO+8x2uSX7mZ1bVQVyFVrZHbW0M2jzyCH3CAwdM6Smti0k2IQWF/JPAs9hyIRSF1BViFygmFaVWpaCUVNzoflaSWViOYCyt1eiawjg0CzyPPlA+zzEnZLzWwJ13suzJK17BhI+g0Mp2q9FoCRJRKTUKAFrrOft1rfURAG/a6PNa69vAFNWN8EcrWyXnNAQy0zWZzcFBRvO+9S12J7zsMk46p07xoZiY4OvpNCedSIS/nzvHv0Xq7Iy3Q6sinV4tSxYnRpzikREjS642+6RQYEbL3IoVGR5m9qJk9qZSfO5CobUlzHMrHx4dHS16fSvYIgcHh82DFAoX+XJ3N197//tps/7rf2W2tGCr2qLubpN53tZmpH4TE5wD5ucNyXjxIsnEkRGSjS5bxmGrQGTJNmkofpFS9E96e02WYSOycn/0IxJq11/PRAcbc3NzGBsbG91K67T+fn7PkyeZDb5rl/EXCwVDpgmBKL/LJnUSQyHT9TmR4CaEiBCK9v0rzVa0Sb1SUtGvzauE7LOPXa5eo2sK49AISCMjoJh4B1je5d//HbjpJuCnfqr6Y2xlfqkRaAkSEUwdhVKqXWud3+jNDcKPYQphnq5lR8PDwE//NB+KY8eAQ4foFBw/ziypyy5jBF5qJfb2cpOMKlvqHIRcwcGhXpCaV7YsWSQWoRAJw4kJk2UYRCF9z+MzMjfHYw0O8rmy9728zCL+bW0kENd6jsbGxuTXtq1oixwcHBqPUvmyZEB/5jPA/fcDr3oV8OQnF39mK9siO+uwv7/49e5u2vNEwkj0ZmYodx4d3Tg7ysGh2SCyZJswtKWn7e0MoNrdkhuNs2dJIu7aBVx77WoiaMUeXdxq67SpKWYRzs7StkhJqVDIEHxCAIq8WUg6kRED/MzgIK9bOs1NFDeSvShEiXxe9ivZj3K8QqEyGXS1sOXT68E1hXGoB8QfKhRM9q49rk+dYkmXXbuAl7605gDKluaX6g2lyz3hTQKl1D4AdrPur+smOWGl1E8DkGppUa31GpVBirDuuU9PA9//PieWQ4foPM/M0Hk4cIALi7k5vpbPc0IaHDTRLYBO9sAAP+PgsJnwvNWyZDt6KRI12bq6gnUctKZkeXaWz0tfHwnK0hqHiQRT4tvbSSCWc9BPnDiBEydOPPH3LbfcEtrKtsjBwaExkFo/StE2if158EHg3e8mefjrv26Ixe1ii7JZ2uaurvL+TCpFfyifNwv6XI7XaXQ0mPpIDg71gF3HUH4XhMMmu1B+bjahEo+zfrvWwPOfzzWGwLZHt9xyyy1o7XVa2fPOZIBHHyWpcdllRlIZChkyT/62yT4h+AoF2il5zc4cTadNlqL8XwhFacxS9kR1ccaifcX9yqDriY2awshWCtcUZnsik+Emz0FpQkc8DnzgA/QNfvEXgUsuWXNX6476LcgvbQqamkTcgtjwYku0b3kZuOIKdop9/HHTJXZ0lL9L10LPo5RndJROdSxGQ+2kzg6NhHTNsgnDVMo4Nh0dhiwUWXI9x2UsRrI9m+WxJifLL0SXlkggdnZyMvIR4W/1OKkz/A4Om4hy8mVZ7M3MAL/3e7SX73wnyy6sgy1riyTw1NdXPjtcAkXSHKu93XRvjEToF63XGMvBod4oFFbXMRS/SJpy2KRhs/nruRzrts/MAM961rqLdmAL26LFReCxx2iLLr3UlFkQIlFrQ3ABq8lEO9AhpFlbm9mH1hwfkqUoEunOTkMoVlLv0D5usxKLpVhLPl2a5VgOGzWEabbv6lAehYJRY0gzotL7ls8DH/sY65S+/OUbypjdXW8AHInYWFR0sU+dYtQrnQb27WPk69QpEh6jo5zAQiE+UDMzpsbb2Bi3dJqy50yG7+vrY8aikzo7BIVsdnUdQzsia2cYSqHvRiCR4DORSnESmphYW94Wi7G2aFcXHWOfKfGtPkE5w+/gsEmw5cudncWlFTIZ4LbbaJt+53eAq67acHdb1hZpzcwDranMWGsRnc2ytEs2SzK2rc0EVHt76TcFURrDwWE9CBFkd0sWwkgy0GzScDNkyX6gNbsvP/ooJczXXbchkbVlbRHAJI8zZ4AdOyhz1rqYBCwUjFRXCJByZKI0UBGpsxCStg9qE4riWwuhWCrvXPPLlMigSzP+aq2v2GisRzCWqx8psLMa15NROzQeEkzNZo2sfy27+JWvAF/7GvD0pwO33rrhnN7ko3lrwJGIjUXFF/vECeDoUU4ge/ZwITE7yyYr3d2UN0t35myWUuiFBRrI8XGSidksHelEgg9qJMLsRCfzcfCDQmG1LDmb5f+UKi9LbjRSKT4Dy8ucWMbHOdbXcoqiUS7SIxESiFU4EK0+QTnD7+CwCbDly93dxQtHrSnVuece4Jd+iR1QKwhubGlbVCiQSGxrW7/eoRCOi4u8tgMDXFQuLPBnfz/JxGYnbhxaA0IE2aSh3Vm3ra2YMGwGWbJfHDsGfO97VEQ97WkV+XYt9g1XYUNbdOwY/ce9e4GhoeJOzfKe0teAYtmuXbtQyEQhGSU70R4rMmdIl1qA40oIRT8B8I1k0EHXV2w0ymU1lpNRl8NGJKPLagwW+bxRrEkt6LWu78MPA5/8JOsgvupVfPY2gLtTDYAjERuLii+21pysTp6kczI1BVxzDSeR48dpFC+5pFjmlE6zg1gsxklocpJSZ8/ja/E4H9r2djrY60X2HbYntKZRL5UlCzo7i2XJ3d2bO4YyGWYeyiJzbIyNitab6BcW+Jz09gK7d1d9/q0+QTnD7+DQQEjJh3zeFAsvtVNf+xrwoQ+RPHzd6yqubbzlbVEmw7koEtmYyMjnqc6QbPShIao4olH+f3CQc0Qjutk6bB2ILNmuY2jLkkvrGLa6b33xInDvvbRVz3gG1xIVYMvbomSSCR4AVWGRSDFpqJQhBW15s6CUTJT/FwprS51t5POGUJRamiL/XC+Laz20qgy6FmxEMq6V1bgewWiPAYe1Ib6QlHLZiAifmQE+/GHek5/7OSo0K4C7Cw2AIxEbC18X2/NYg+PsWU4co6MkEkMhEonxOCd2qc8hWF5mxqJkZU1NmYjZ8jKd6XTaSJ0HBpzUZ7tCitfbsmTbiSmVJTdLFkcux8zcxUWO49FRbhs57tKYqK+PBGINk32rT1DO8Ds4N
AjryZcFjz0G/MmfMDj4a79Ge1YhtoUtSiQ4Xw0MVEYAJhLs4qw1icOeHgaQYjHOE0ND3Fqd7HEIHlK7zs4yLJUl26ThViOkk0kSiIuLzEC89NKKP7otbNHcHGvV9/fTjxS/2M5WE3KqVN4ssMnEch2dJeNQ6maWG2OFgiEURR0kAapyTSkqxVaTQVcL+e7rNYTx0xSmNMtxOyKbZVBQa/pC0jBuLaRSwEc+Qh7kJS8Bbryx4mu3RUdlc8GRiI2F74vtecDhw6ZJxNAQa5N0djKb6uxZThYHDqzOWojH+Z5UihljU1Oc9AA+xLEYI/Ra8//iaDtsTRQKxRmGiYRxVJRaTRhuZNw3A4UCI+Tz8/x7eJjZh5WQm7Oz/OzAAOU5NTo+rT5BOcPv4NAArCdfFkSjwO//Pu3xb/4msH+/L/u0LWyR1vRZlKIfU8n1KRRIHCYSXFBLoGlujq+FwwzErlf6wmFrQ+vV3ZJLZck2YVgqNd1qKBQoYT55kmuNK690DedWvUlz7TU9zbrbU1PmfzaRKHUSy8mbBWuRiXbdxPWkzvZ+hFDMZPiaZHl1ddWeKLLVZdDVYqOmMEJAlsN2agojgdRCwRDdG40TrYE772Rd1mc+E3j2s00ZtwqwRa5cc8ORiI1FVRe7UGBh44UFOjh9fcxIjERIAh47xonmkktYC64Ui4skE7NZSjinpgxZKPWGYjEjsxKp81aLrm4naL26jmE6bf7f1bValtzMk5XnkTi8eJG/Dw1xrFcaaZUGRIODLIodwHdt4qtVEZzhd3CoIyqRLwOcg//0TznHv/WtwA03+F7wbRtblMvR55GyGpUilaL9z+fp2wwN0R+am+M82d5OMlGCrA5bF/l8ceMTIWkA0+HbJg2b2S+qB378Y3Y/3bOHJGJvr6+Pt/rV8mWLTpygPdq9mwGKUjJQxo4QSeXkzcBqYq40U61Q4DiVBitrSZ0FnkciUQhFkVbbhGIQ43o7yqCrxXZuCpPJcFPKX4bsd74DfPWrTJK65RYS9j6wzUdcY+BIxMai6oudy3GREY/T6HR1kUjs6TGTWSzGzKy9e1dPVFqThJme5mQ0MEAyUeoLidQ5FqPDrZSROjdjRppDMTKZ1bJkebTb21dnGbYKQaw1yfOLF80CcGLC35i8cIH7GB4ujhjXiFafoJzhd3CoE0Rmtp58WXDHHcBddwGveAXwvOdVRWRtK1uUSnHr7fVHtnoeA6rxOOe/0VEGYpNJzi+ZDO/V6KhTZGwVeF6xJNmuOafU6jqGreIX1QunTwMPPsig8nXX+V60A9vMFsXj7NasNdUt/f1rE4kbyZuB1WRi6fv8SJ3tfQqhmE6brEYhFNdrZuEXpTLoUmJxu8igq0UtTWE2agizGbatUDBlXKRuZ6X3/Phx4FOf4hz9ghcABw/6PrwbXQ2AIxEbi5oudjbLCGEyyYezvR24+mrTsVDkzR0dZO7LOcKeR4d5dpb7GB5mAxbbGc9mKa+ypc7S1dkZ/c1HPl+cYbi8XOxUlBKGrVrvMhrlOM1m+T0mJipuNACAY/f8ee5ndLQqh3g9tPqT4Ay/g0MdIDV/JPNjPef9e98D3v9+4ClPYcHwKm3UtrNFEkwdGPCfhZHJMANR5pWREd6jpSW+nsvR5xkd9SWdcthkiCzZJg0lcwswHZI7Oows2cEgGqU9SqWYoLBvX1X+/razRdPT3ETl1dXFdZaQZLa8txJ5s7xPiLhymXyyH2nus5HU2f5cNmsIRTlPu9Nz0Flt69VX3M4y6FoQdFMYm4AMYo0vxHU2a/wgP/Y2GgU++lHOyc99LtUZVdjrVrdFLQFHIjYWNV/sdJoZieIgKQVcdRWdaYAP3fHjnFz27Fl7UZLPmxpxgCFZ7Ae1UOD+YjHuz0mdGw/PWy1LlnonABc5tizZT6SnWbG0RPlxOs3vMznpW1IDrYFz5zh2x8eLu5gHhBa/yo5EdHAIEpXKlwVnzwL//b8zkPfmN3O+rnJe3Xa2yPNo28Ph6iTIUl8xGuU9Gh5mMFZrEpTz87yPPT30jZwao/mQz6/uliwIh1dnGba6X1RPZDLAffeRRD94ELj88qqDz61+lauyRWfO0GaMjtJfDYfXJhIBQ/wI+bfmyWxAJsq+/EidbdiEonzeJhTrtc7bqL6ik0HXjiCbwpTLclwL0j3c8ziW/Ga65nLAJz7BxkVPfzpw001VlxlxI6cBcCRiYxHIxU4mSSQCZvI4dIh1fuS1EyfoIA8NMaK41mSQyzGKNj9PwzA+zq3USIjUOZmkQejtZX0551wHi3S6WJacSpkJtqNjdZbhVorcJZMkD6Wr+MSEIcf9QGs6dUtL3IePLqd+0OoTlDP8Dg4BwY98GaBdv+02ymvf+lYu2mvIetuWtiib5VzZ3V39tcvlSJxIwGp0lIST1rw3Cwu8p/39zFisttupQ22QrCs7y9CuH1dax3Ar+UX1htZsXHD2LGv7HThA375KbEtblE5TCp5OM2A9MVFM/pUjEiuRN9vvtWX45cg1ycSVTMdQyBCKlUAagEkgDDASVL+ZZNVgvfqKTgZdHwTVFEbGryQ3tbVxfVrNmPniF4EHHiCn8ZSnMLhaJdwoaQAcidhYBHaxEwngJz/hA1wo8OE9dIgRdcH0NMmU9eTNgnSacuhYjA++kC+lxjqbNV2dPY+Ty8AASUVn2P0hl1stS5ZoYDi8mjDcqguYdJrk4dISx974OMnvasaTRIUTCUpL7OchYLT6aHeG38EhAPiRLwN03N/3Pi7c3/hG4ElPqtlObVtblEjw+vf317bIXVoiYag1CRTp1lwokExcXOT7BgaM/NmhPhDJpU0ail8kmVsiSW5vd7LkWnHkCHD0KDNx9+8Hdu2qaXfb1hYtLNCPDYVoz8fGijsrlyMSK5U3CyolEyU70c529NNVXLLJ0mmT4SvZ9X4aY9SC9WTQgCMWG4mNSEapuam1yT4EzP3ZqF6j4MEHga99jdzDjTdSZVnDfXUjogFwJOIGUEr1aK2XA9pdoBc7HgcOH+YDK1GkgweLpZuJBOXN2SyjjJOT6+8zmWQduUSCxmBysjyh43mmq3MuRyMxMMDNOdir4XmrCcNslv9TqliW3NOzPWoxZbOU1EejnEjGxrhAqzaLwPOAU6c4hnfurCmaXgk2ZYIK0B45w+/gUAP8ypcFd90F/NM/sdvgc5/LYEeNC6Bta4tElgwY4q9aFApUZEgmvC1jzuf5v1iM89PQEDeX8VYbtDayZCENJQsK4HNlZxk6WXKwmJ4GHnqIv+/bt75qqUJsa1t0/jwDDpEIfdmBAUMkhsPFsmQblcqbBTaZuJ68tFTqLN3H/dgtybJPp82aJRwu7vTcKGwkg3b1FRsLzzM+UChk5stqmsJMTwP//M8c/zfeyOBqd3dNTWGafqYImF/aFDT9Y6aU+nmllFZKvVAp9YdKqbNKqWWl1FeUUntW3vNWpdRRpVRaKfUDpdQN1ucvUUq9Tyn1iFIqsbLdq5R6UZlj3bOy/4NKqS8qpeIAvqCUetPKOTyjzGduXvnfL9X1QpRBfz9w2WWm
q2BPD0nFmRnznt5esvmDg0y3P3q02EkrRSTCrMX9+/ngnj7NSGU8Xvy+UIj7vOQSYMcOHn9hgXUMpJ7ddoXWlKtdvMjr8cgjTM8+coSSkWSS92X3buCKK2gsDx3itdwOxdzzeWa9Hj3KRdnoqCG/q534CwVe61SKkfR6EYi33347lFJw9sjBYfuiUDANrbq6aLMrITceeQT4zGc4Jz/zmbR51ZIizhaZ0ioSpKsF4TCz4CcmeH/PnzfZiaLOuPRS+lnz88DJkyQMXBy+cki3znicMvLpafpJsRh9xrY2ZsSNjDCALaoEaRDnCMTgkEiwLFI+T59p587qCURnizg2JyY4VpNJjulUymQAinS5XFZdOGxIRmmWsh5EqhwKmY7N5QiacJhrs+5ukodC+tiS5Y0gqqiREVNiqK2N9nZ+nuu9WKy4Vnu9INJvyUbu7DTZyHJt83n7SbAqAAAgAElEQVReQ2nsISSqs9PBQsqJFAr0gXp7TaBHxlxvL3mKwUFm546O8ufgIO28JMxkMsDdd/O+XX45bb+oHhcXOVcsLDDhJB7ncZNJkylb7f11/FLtaCUhwP8AkAbwvwDsAPCbAP5ZKXUHgDcA+DsAEQC/A+BOpdR+rXUOwI0AngfgTgCPAxgE8Hrw5t2itf56yXEiAL4G4G4AvwXAA/ApAH+5cpzvlLz/jQCSAP5poy/wjW8AT34yH5qgaglKzcPjx03Dk8ce42QxNcX3tLWRbJyZISn48MMkCtdrVtHXR2InGiXhc+IEH/gdO1bLoiMRbrkcH/p4nPKgzk4ai60udc5mV2cZyoQuE7A4wtXWidgK8DxOBnNz/H1oiIuEWqUR+TwzEDMZErPSrbweSKWe+LWl7dFXvkInTJxXBweHyiBNHJTiHFdp4CMWAz70IT53u3cziHLunP/ji3zogQeeeKmlbdG3vuXjy6+BTIZbUJkxnkcfJpk0KgvbZ8tkzMJZ/r9euZjtCCE3ZLPJEZsMEBLAzUONQ6FAAjGR4MI+n6dfVi0OH37i15a2RadOkVCtdiy2tTEwJEGmcJjEWzjM1woFQxZ6XvHcIRl0kj1YibxZPmNnepXLTFTKEDzyPEr2rx+pcyhk1nuSiZ9O0y9OJk02WleX/4Ya1UK+q9yzUhm03aFd3u9k0NVDMlOl9qHfjt4yPmUd7HnAF77AufQpTyFXsX//+g1h1iLa5b5++MPA29/u62u1tN3aTLQSnVEA8CytdR4AlFJh8CYMAbhKUkKVUosA3g/gRQD+BcAXtdaftneklPprAA8C+G0ApTd5CMCfa63/R8ln7gTwKqXUr2mtMyuvRQC8AsCdWuuSXL3VeOABsumXXUZHVwqCRyL8WS25NDpqMrGGh2lMjx3jw7Zzp3nfxAQJvWPHWE9x1y5DNK4FqQ+0sMCo8dGjJCunplZnzLW3m0iDdHWemaFz0t9vIlitDHEO7E1qhijFezk6agjDrq7NPd9mgNYcP7OzvH79/RyLQRDp+TzHvXQj99vF2Q8uXgS+//0n/mxpe5TJGAkCYMhE2ZwUxMGhGFKrTRaCfjKj8nlmIGazwAtfSBsYiVR+TKk5JJIyrTmHr6ClbVEQ6OzkfRFSr1ZCKhSiv9LVRT9mYYH+Tl+fye4ZH+f9kP/H4/xMJfd1q0EygOzNXrzLNRM55UaNJBzqB62ZFJBMmiB/rcoNi4BsaVuUTDLRYteu6oPbkn21uEhybW6uciJR5MzyPs/beM0khHxpk4y1ZM5CGgoZI5uQ+pXaTinD1N3NY8sclcnwe0uQTWTPjXre16o9acugRWpb+n4ng14bco+zWXPvg6iN+e1v85nbv58ZiHv3VibrX6spzOc/D3z9675JxJa2W5uJVqJ0/rfc4BV8B7zJHy/RlAuTux8AtNZJ+YdSqgtAD6iVvwfAq9c41t+Wee3/AngtgJfBsMIvB9AH4PZKvsBP/RQjXdEoJ6lCgROMMOrt7cWkotQDqAQiwTlzxnRXPnGCr9ndjXp6gKuvphTnzBk6vvv3r//AKsVU9qEhnu/MDKW5Q0MkE0sj/+KADwyYtH4pTt7by9dbQbIrsmS7W7It0+7qorMghGEk4hxjG1KvamaGTkpvL8dpUPc+lyOBmM9TCl6vxZvn8Vn67neLFkYtbY9e/GIjZ7EzRgRSh0QcTj9FuR0cthpEhql1dYXlP/xhPl//+T8D11zD4F65uT2X45xpb3ZWu2SB/PCHRQv/lrZFN9+80Tsqg+eZmoUDA8HsE+A9j0a5b/GFSoNViQR9o2yW42NsrDV8nGohmYWSzWRnhoRCxY1P/NZgc6gvTp7kfbviCo7TfftqIwPOnCl6HlraFu3cSeXV+fNcR1X7DI+Omgy9dJprH0nwEOlxKFSeSAQMyS5dySvxv8qRidKludxnJWvQDgBIgzA/XZ3l2EIWAsVBL1kz2YRio+2BXBsbpbUVbf+3lFTc7r6vNNrxPCMjD+KaHDnCZirj43z2LrvMH4ldGjD8wQ+AT30KuO4636fS0nZrM9FKJOKpkr+jKz9Pr/H6MAAopToAvBNMFb2k5L3lVPQLWutomde/sXIOb4C5yW8EcGblfxvipps4WV+4wEl33z46u3Y6uNSLEUi9JSEW13t4d+ww9eampmj8Tp3iRLR3r3lfOEw58+ws///wwyQSN5KBhkJ82EdG+NmLF+lcS/2acpNOOalzIsHvMTDAYzaLgc5kVmcZimMsLetHRgxp6OQ3a2NpiZmrmQzH7c6dwWYJZrMkED2PdarqtWBLpSjVOX6c4/TKK5/4V0vbI4lEi/MnkAwScSot+XZRpNqvk+ng0Kqwuy9HIv4XQN/6Frebb2btWwnyCTFpE4Z2Vnt3NxeeModK5vaPf8w5+xJjPVraFgWFUIhzjMiQgwoqKWXKkczN0e8RGajYwN5e/j8eZ52wM2f4t92cpVXheau7JdsdYtvb+V2lAYrzi5oX8/NUInV00LZMTdVGIC4u0h5Zz1pL26L+fo7t+XluUqbAr80PhRgwP3eOz4s0a+rtpc0QIjEcLs4cLN2HLW+utMFEKZkotRjXIsPWkzpL5rDfNVpnp1njZbOGTIzFuHV0GEJxs+yFk0FvDJGs53L83kGue+fmWAexp4f8Q61KstOngb/4C/pX73iH74+3tN3aTLTSMrDg83V5xP8KwJtBTfu/AlhY+cwvgMxvKVJlXoPWWiulbgfwe0qpcQAdAH4GwLu11mv0HCpGezuzAMNhTr4nT5J8m5gw3bwAs3iXBUY8zvcDfJBLZdC2E7BnD43fhQusuxQOs5mH5/FBtTE+vlrevGPHxt8jHKbzMTpKomh+nnKe8XFGNssZGZE6j4zQyY9GSUTOz2+O1DmfX00YSiRKFovj44YwbPWFQKOwvMzMw2SS12zPHt7fIJHJkEDUmgRivSTjFy/y2YjHOTavvtos8rFF7JFkkYgNEXJQxrsdpZaouF1A285UdPUVHbYSJBO9UDDFwv0uHE6eBD76UQYMn/pUzsOzs6YouKCzk3OxEIZrNWo5fRr46ldJIA4PP/Fyy9uioNDezvkgnTY
L46DQ0UH/SPyxs2dJLvb3m0XlwAD/jkbpE506xUDp6Giw51IvSGMHmzS0F9Nyfe0sQ4fWQDpNwi8cNsqiWnyzVAq4/36OBStJoeVt0eAg7XQ8TjtdKBiS3A86O2mj5+fpP0WjJkNXiESRNgvZV+o/2fJmIbcqLQUgZKIt390os65U6iy+n1+psw35zv393KcQivE4N7EpXV2bG5heSwZdSixuFxm03CutDSkcFDIZyo6VYiOVoaHKuIe1EI8Df/InvAfveEdVdq3l7dZmoZVIxGrxGgD/oLV+q/2iUuoXq9jX7QD+AMDrAHSB3a0/4mcHkQjJvBMn+JDOzvKBmpw02VTSoc7ODMxmTaZiMsmJSeqQtLUVS6CFSDxzhpN7KMSImOcxA9GeQCIRdop8/HE6xUtLlcsb2ttJVI6Pk7Scnjb1P0ZGyhtVpfiA9/fzu9hS554eTuBBZ5V53mpZsk2EdHfzuEIYVtpp08EgnSZ5uLTE8bhzJ69p0NcxneZYVYpjux7krucx83BmxjhO4+N8Tn74w5p33zT2SBxUcRTLOXB2lFpQWjBfpCv2Pp0M2qGVIfKdauXL2Sztx7vfzX1dfTXnyLExM18PDvqrh7ywAPzzP9PhfulLgU98orrvZqFpbFGQ6O422T/9/cEv7qSepQRPl5dJEkpZF8lclFrSi4v0PQYGSCo0Uwa3nXUkPwVS91MyDKvJSHJoDnge8NBDvN9jYxy/ExPV7y+fJ4GYywFPexrXDjWiqWzR0JCpsdrezudXVGF+MDjItcfyMu2QXR+xUiIRKJY3i69W6bMotRH9kIlrSZ3FH6y2pqn4kn19Zp+pFNcNsnYQQrEZAhSV1FfcajJo6eAt5HHQ2aJas7FjNEq/qKeHvES1yGaB//k/Odf+9m8XKTQagaayW5uBJnJn6oYCDGsMAFBKHQTwH/zuSGv9uFLqm2DKaReA72qtH/O7n8FBZv2dP0+jmUhwEh4ZKcouKIJEc6QOkqQZC7FYKoPu6KCB/uEPKaHavZukoueR+beNWzhMYrO/v1jeXCmb39nJjLBkkgulc+dIjk5NcTJey5AK6ZnPG6nzuXM8d5E6V7MASKeLMwyTSSNL7uig0RobM3UMXfZU9chmea+jUV7HyUmO4XpE5aTodSjE8RZEF85yxzh8mD8nJuj0tbcD114bmFPTVPZIJDIS6a7kvtk1rwS2BLpUBm3X2BFSthWdK4ftASkeXql8uVBYXccwmwX+4R9oG1/7Ws73UjKkGruVTLIxSzgMvPzlgQVPmsoWBQWlmNUZj3P+36hMSzVoa+P8sLxMMvHcOfpmduAsFCK5ODjIBY5I+YaG6jdHrgfJJLdJQ7tbssgthTB0ftHWweHDfB7Gxnh/16rJWgk8jxmNsRj9oqGhQE6xqWyRUnxGpc5pb68hVvzKm8fHufYqFPi5+Xm+JpmCdrOV9YjEauXN9udLycS1mq/Y16Gc1Bkw/ly140g+39NjCNt0muvhRMIQWJL53CzYqL6i/C5oJRm0lG4B6nfdv/99JmlcfjmfqwMHqj+O1sDf/i3t2y/8AnDjjcGeawVoKru1GdgOJOLnAPyCUmoZ7JizD8B/BvATAE+qYn8fBvDRld/fVO1JjY7yYZUMvHyek0sqRee0ko5cQsIJpMaSZCuOjlKOee+9ZOeVomw5GmXh0VIZqBBrx47xody5kynGlRo9ybJMJEiQnj5tyMT1Cp23tRkCVbo6X7xYLHVei8CRjAN7k3RzqeEwMWGyDJtpMmpl5PO8t4uLHB9jYxxv9Vp4LC9zPLW1kUCsR5RyZoaTWzjM2odnzvBZuuaaQOs5Np09kvo5EuGuxikUKbMtgy4lFsX5BFbXVnQLVofNRiXyZXmPTRjaWe3SbOvrX+cc9p/+E6PtUrKkGuTzwOc+Rxv46lcH2jSk6WxRUJAmNNIMrV4lL0S5sLBAv2p5mb6M7Ze1tZE0kMZ0QigOD9cnWx8wXb1t0rBUlhyJmCzDZsqOdAgW585xGxrivZ6YqO15OHaM+ztwgMkJAaHpbFE4bIjEVMqop+Jxf/JmUbJcuGA63EajfPbF17KDuDbBV2obapE3C0rJxPU6Oduwpc62X1eL1FlgNw3zPEMoJpMmi9MmFJuNiKukvuJaMuhmIBZFrVcomGzQegS5Tp0CvvMd8gpSk7WWIMSnPsXuzi94AXDrrcGdpw80nd1qNLaD6/DroA795aBO/TB4c65EdTf5MwD+BtSsf7LakwqF+AAVCkZ2Iw/ymTPVLTrCYZIdNuGxdy+zEWMxHi+fJ0F4/ryJBNj1FUXefOoUHYWlJRKDfkib3l7uOxbjcU6e5H537FifjLGlzum06YgYjXLSlowCW5YsxIRdiF4Iw66uzTfOWw3SUXx+npPj0BAdpHouQhIJEogdHSQQgz5WoUDycHaWC/TLL6dken6ejvLYWKCHa0p7JESiSF1qfW5sWbNAnE8hF+2op/1+cUq3Wo0Zh+aFyJeBYilVJlNMGEqHZsBkUUjzk+5ujt0HHgDuuQd45jNJIIr8tVp8+cucR1/yEs7hAaIpbVFQ6Ow03a7r2QhKMg57ejhnTE/TVynNNmxv5/0TYuLiRQbhRkZMXcVqIbVts1lutsROZMl2HUPnF20PxOP096Xeam/v2mqnSnD2LEnEyUl2Ug0QTWmL2tvp40rZgsFBkynnR94s5SuiUdqlpSU+j3amu5BMpcqQcs9qqbw5HK6u+YtS/slEUaLY2YlBSJ3t/UuCjJCudvNRuxN0UF2Cg0Y1MujNqq+YyZj7V9pjIUjE48AXv8g11p49nC/37Kl+f/feSxLx+uuBN7xh0xIRmtJuNRJK23m3DhtCKdUJ4AKAr2itX+Pz46sudirFBYLnGWltJkOHUGQvtRrJfB549FHu94or6Og+8ggngt27i42ZTGzd3TTY09N0vqWTtF9ozQl4eprfqa+PZOJGk6/ItWMxZohJs45wmPsYGuJPIQyr6ZrpUDk8j/fx4kU6LgMDJLrrndm5tERSvbOT2bRBLwSXl+lkp1Kc0HbvpqN89CjH6eWXrzmumsJ1qcEerbJFko0INK6WoZ2tKL8LnAzaoREQ+bI4+JIBIUX1AbOwkblGsrhKMTMD3HYbAytveAMXOn6y+Uvxne8A3/0uCcmnPnXNtzXFUxGkLQoKWtOHkABlve2H1iQGYzHTwKKnp/x7UynOp+k059GRkcqk14XC6ixDceNDoeIahh0dzi/arsjlgPvuo12bmOA42Leveh9qYYF1ELu6gJtuWrOkwpa0RYkESZDeXj6jqRTnDQkkVfKMaW26NXd08O+JCTOP2MShEInA+rX1JPCrtX95c+l+bBluJWSiDSmVIF3ba5U6r3WOdqdnz+N16ew0hGKr2TqbVJTsRUG96yuKctHzTHObes2P+TzwT//EufGGG3ica6+tvv/B4cPAH/8xEzx+7/cYxCuDprBFflAjv7Qp2A6ZiEHj5wAMAfi/Qeysu5sPwsxMcRv1fJ4PXDpdmbx5PbS1UZ75yCPAY4/x90iEv6fTrJkoEXspAhxdaUIeCj
aCsKCN7ZJK5Y4FiWUbLC2j979kCLjDu0NTUVNiYmCRIJPKNGo9cTE4jCVVUhA7GyEguV1wuSr9AsINY0aWnBuTChODKCzEvWRFy3Ds+Q2y1KlIaGcA5tbcVpbpo4fqHXR1RVPOdcvsyZ4KyVyERjGUpT5w1Yt6e+nuiDH8ye3ZdIZAYXHn8cc++KKwovGVZVojvugCP7uc8h84fBpYF6bcp892JkBJH2igpkC5RYPpOBSITofe+DntD734+SHS5rXCjPiYljA86sCYfhT5XSBZWhaSCZAgFBinHzkGMJVcXmW9OQvZO+ufV6cR3a2jD+v/8d2TXJJMoCX/Oa1ExpImyY43F8Jpdvs3kzgqGveQ2aDagqrvXoKD7PmU1cDcTkhKJgTFNTokttNvIkGoW/5HAU7p9lu0b/939E//oXgj5vexvGxmWcjY1Y17xe+LiBAOxhsU0NTRwbcKM31nB2Omfv/lVU4HmZnMSzzUkJViv8+qNH8XylN8qor8ec8Xrhx5dCTjGRKMv4vxEikZsfseSRohRf3qxp8HGCQaK3vCV7Z+rRUSQF6QOozzwDSYXzzkODOSbRNA3XbWoKe85cAaEnniD63/9FefEHPiDOW99tftEiY+cQjULfdXwc8gyrV2PcW7bgupx/fmlVddwN/he/QLINy9ksXSrOT+97cwdu0w7NPSzocmZJkh4goncRUYWmafIsfOW0F1uWMWknJ/FzQ4MwtpxxGI2KdG12SkrtrjUXEQiIrtN6B1XT4OAwGVeK8xqJwNDb7TDmua7h4CBIQE0TpGEigcWys9OYYTtyBNEnm02Ic89WefPRoyDSFi0SJdzxOLKIBgawKJxySuFlmV4vrl9Dg/FMh9FRkS1w8sk4fxYKLmSRM4LxcZCIioLztljgNHOGYlUVxn/0KOaVw4GF3WCZe1ln3FyzRSYEuGMmR631mz29MLS+/Pl4J4kUBSLY3d0g9To6Un/PEhw1NanZzVu3IvuHxbkLgaYRffnLiLbfeivRu94lMg3jcWyc7HbjZMnQELR+HA6iN7+5MHkLIxgexiZi926iu+4ieu978UzMwLNh2iITOREKwV/JJvNR6HH8fsx9pxNze640Juvqwjp+8smZmTaxGORnNA1z/tAh2IlVqxBQbGkR5csMvx/+Q3197uqMf/8bTRM2bUJDEvbPuTKivh7Hnc4/j0RAEnLZub7SKBaDb1RRgTGWsoGOxYjuvx/X6eKLkanEFR3ZiJJEQvh3nGFmMMhq2qJjiEQC95p9lWM1P1m/nDXLGUwutrRkVmRx8kxlZemEOZEobTZCJBIJX44zOIlg5wrNjNy2DbbhwgtTy5GJcH927oRN2LBBjGv/fjSmW7sWMi081znYsG8fghknnJD9O7dsgazLqacSffrTYk6HQvisywWNWCM2JBxGsyWfDwTiihWwh//8J67lJZeUFpCSZWhY/+MfkNW6/HKMq6MjdyMwJhOJCiITi3qCjoHNmddYEJmIkiSdQkRXEtEDmqYdPsbDyYCqwpHgDpZ1dVi0k0k4MmzQOFrCxOHxDI9HdKLTOy+SlNqxuZQoKUeFAwF8V7qTlEwihXxqSnSsczph7Px+kHDd3YjS5Fukli6FYe/txXfV14PAa2oqf6mRHhMTGGtTU+pibreDsONOir29cKqNPldcylRTY3z8w8PQlaypQdmPzYbxDQ/jPpaTQJyawrEXLYIjr6oYr9+PRXliAt9fXY2/SSaRldTbi4WsvX1mOlvOdVtkIhXJJAgqIji36U45l8wwcciZibKMn406sPMNDz0E5/T9788kEBUF6xbrfjH27oVzvWFDcQTiN78JAvG97yV65zsFgcj3iLPYjeDIEeiYeTzIQCy3xMSuXSAQfT6iX/8aZcwzRCAWDdMWLQy4XLBHodD0wdJciETwHMsy/IZcWXPHCoODIBBXrMgkEFUVpOHu3TgHmw0b7VWrQCi63Zl+RywG/6CqKjeBuH07Mn+WL0cgmf0Jlt1ZvtwYEVJVBf9kchK+CWclWiwgEG220glEr5fou9+Fn3XddQgYM4GY67iVlSAt6upwbuPj8KkaG8sfbCEybVGpYK1mRRHP4bHM2nK7RdWYzSYSFOrqQLKPj2OM+qAGNx3yeoVGYilgOZpCMhKZfLNaReKOLBsvbz58GD7O2rWZBCLr5Gta6p5xeBiNUFpbUbWgv2/JJPYkVVXws/TlzYznn0cjlg0biG67TZxDPI79a2UlyEcjz0MwCKmDUAja0EuX4v+bN+PzF15YGoEYDBLdfTfs8ZvfjIakqgqpqemSWLicmclEvg6lPOOmzSkPFgSJSESnENEXiOhfRHT4mI5EB02D8zA+DkPldGJToyiIyBCJRh4u19yJ+s4GJAnXgskqveNiseCa+HxisSl2Y8bl334/nCSOBPv90A2UZRj3RAKOc0cHFqTGRtynw4dh5FeuzP9dixZh7N3dIlI9OorFqhyRt3RwV7SamuzR+akpONOVlTiHri4Y83y6O5EIntuqKuPlf4ODiLbV14tyo6kpOPI1NZldFEsBlwu43UIM3WIRpTmKIkqyfD78PetxOp343aFDeOba28tO2M9JW2QiFax1yOXLLKXA3f/04LIL1kpkMpEJRSYYjxe88ALRX/8Kh/KsszJ/zxqpzc3Cpg0OosRn6VJEsguBpkFH8Oc/RwnezTeL4/LmqRCd3N5eor/8Bbbg6qvL3xjn8ceJ3v1uHP+JJ5ABIElzcv02bdECADeAYn1Eo4R5LIb1MZGA/WpqKk9JdDnh98OvaGwEiahHPI7ywBdfhN9x0kno+mm1imzEjo5Uv0tREExk8k6PZBK+z4svYo4vW4bGAtwchYk/bn5i1J+zWPD3bjcIu4EBfE99fekEYl8fJCeSSaJbboH9zUcg6uFwwIfijMmREfhLPN4ywrRFRYD9lERC6OHPFV+julp0bbdaRUfo5mahj9jenjpPqqpwLsGgkAEoFkwKFkIkEgn/zuUSmZ1Gypv9fvhFTU3I9k3H0BBssD5ZIxjE/KyogF+j32uoKmybJEEHkc+FKxGJQMb9z//A9n3mM8LHUBTsXzUNQQ4jGeg+HwjEWAz6sO3t2Jtu3ozjXXxxaXN+eBhyLqOjwhZpGgIuRtYVvp+cMVoGMtG0OWXAQiER5xQ0DQZndBSLs8WCyWm14me7HRsQzn5bqLDZYLhDIVwX/YLCWWR+PwxzoeL8erDh9vvhIIVCcOScTpBbU1P4m46O1EWkpQXGdWAA7y9blv+72tpwvw8ehFFsb8f4YzGQleXaaEajosFIuu4Yl81XVIio94YNIDd7ejCejo7shjmRwGft9kxdk1w4cgTn29gILR6LBdd6YADPfbojXwo4as4EYLbjsqZdbS0WIS69CAaFkC9vJnw+jLuc98bE3AbrWqlqqtB3tkYrDL1+C5fN22ypZCJnJs5nDA8T/fjHCJpcd13m7zmzubFRrF1TU2ikUluL8hgjOkN6lZXf/AaO9hVXEN1+u7BLqirKzF0uYzbk4EEQAE1NEC8vZ4BA04h+8AM48xs3YtxsXxf
yOm7i2MNmE1rT8fj0mYTc5CAWw+caGmYmK79U5CoNlGVsrrduhY+9cSPRa18LH1GWQSBKEjavelvEjVQUBcSZ1YrvCIfhfyYSCEA8+SQ25tddJza/qgq/g0jolxcKhwM+5YEDuE+yjH+L3bi/+CLRT3+K8/6P/xBZqEYJRD2qqrDpDwaRKXb0qNBpnGvE8kKBLGOOqirWl7kmayVJIpOVs1g5oNrUhLmp101k1NRgrk1OCv3gUsZgtabqVhslEhVF+HGRCPYIVVXZbacsoykKEcpz0/28YFDoEvK+KZkk+v73cdyPfzzzOvT0wPasWyf8FL1OYk8PZFLa2tC4jeehpsHPicUQwDTi40xOgkCUZTS7Y335zZtxnIsuKm2P3dWFsRJB/ob38suXF+6DZSMTNc1svnKscNzLVEqS9EUi+tErP26WJEl75fVu3Z81SJL0v5Ik+SRJCkqS9FtJkjIKGSRJWi5J0i8kSRqRJCkuSdJBSZJulyTJ8HUMBlHW1dUlSr4405A1VNra8J658YDBdjiE4K0elZVwsBIJ0SSlWDgcuAd79uDV0ABSkLvipROIjPZ23K/RUZBiRtDSAuMeCiE61dwsyMhgsLTzIML1GBjA88OZeAxuhsKZQuxMVlYiBb+9Hb/v6sI114MzZFkk2YjB7uvDgtbSIgjEYBAak04nFpFyGf6xMZF5mYtATIfFAsJx8WLck2XLMA+5+7Pfj/n63HMYM+veFYO5ZotMZCKZFJ3b0x1Gzmhl5yUd/LwxAWaxCPslSZiX0Sjm0XxELAbdHo6apzvK8Tg2BS6XkDiIRkXn46uvnt5h1DTxYvz5z0T33IOsxzvvFPZK0+DYExkvNdq7l+ixxxAQuOaa8hKIskz0yU+C5HzDG5DpOJcJRNMWLTxww6FIJLsNSiZFthlrPre1zU0CUdNAFCaTqGyoqIBN3ruX6Fe/QpMCq5XoqqtQNldTg88cPozPLF+eGRScnIS9qq7GGjAwAP+MG+mFQigd7OxE51M9eTY5ieM2NBQ/3xUFPkxNDXwlhwP+zMhIYX6HpsHO3X8/iL/bbhOa0G53aZmNHg984ZYWjImvEUt+FAPTFhUGTROltkRY/+ZqQ02LRVQ2eb3Cb3K78SxNTQm5LoYkCSKeJb5KAcvNaFphvhcTjhYLxlpRgWvO/qEe//wn5urrX59Zhs2Bi8pKkS2taWgq0teHDunpCSgjI7AFS5akVntZLDiXw4eR1VddTfTFL6ZKSh0+DGJy+XJj0gPj42i4pKqo9GhpgS3717+wVzvvvNyyDkaweTOIw+pqaFrzs1oMgagHk4m8L+cAv5E2H6bNKR/meV6EITxMRIuJ6L1EdBcR7Xvl/WeJ6PxX/v8XIuolos8QUScR3UJECSK6gQ8iSdIqIvo3EYWJ6D4iGnvl83cT0Qoi+mC+gfzhDyAlLBZMyoYGOGtWKyZtOQik4xGaBgdOUWAs08m8cBhOTClakYEAosyJBBaBwUFEelgfr7d3+s+HwyCajhwx3l3Ybke6en+/EK/t6xOEcjHOnqLAqVMUlE+nj5u7AdbX53b8bDac+6FDOEZ9vYi0Kwo2x+kEYzb09+M6NjfjXHt6sAgzwdnRgffKgYkJOPvcKbK7u7TjsQNht+Nce3rQhZHLe17/+qIOO2dskYlUpJcv53LK9SQi6yEy0klE/tliEVpZrN/H5FIpXQhnE5pG9JOfIBPxU5/KdCpVVWh4sXSCoqBxSSAAfcBsGkfZHD6+bv/8J9EXvgD9xP/+79RrxUSI0Q6Ku3Yhg2jpUmQ0lpPYCwbR5OUf/0DGz+c/LzJSZ4JA1Dfz+d3viK6/vqjDmLZoAcLtFtp7vMFUFLwXCgnNaY9nbnfC7O4Gcbd+Pcba0wP/y+fDJrizE/Zm5UphTwYGcN4dHalVLZoG/6GvT9hkLg2trcXfjoygNLqpCbZMT0AyEVJXV7xWJAdoVRXj567JgQCOPziI4+fLBpJlol/+kujZZ6E3dt11qZIc5SCa+BmprsbYWC8xEECjhCJg2iKD4KYfmpbZ5G2ughvzeL2Ysw0NeIYaG3Euo6Mgy/T2xmrF342NifL5UsBEor5rsxHopWpcLvhunCXM1YO7diF4ccYZIMbSwfvK9euFr/Loo5CFedOb0AxKj2AQn6mrg7+SjpERov/6L9z7z30ulSgcHsY1a283Vik2PAx964oK2DWWe9qyBXP6nHOM72fToWnQg/7tb1GO/dGP4hmw2UCalquyi8lEfSfn557LLrWjg2lzyoTjnkTUNG2XJEnbCA/L3zVN+xf/ThIr6jOapn0s7f1bJEm6WdO0wCtv30tEQSI6WdM0/yvv3S9J0hARfUKSpG9pmsYPYlYEApjczW59kWAAACAASURBVM1wUOaykzaXwCUYrOnj8aQ6Qy4XjAeXhhdinDQNhpTLMzo74UwfPozjtrYau0/cvGN4WCyA+VBbC4Hd7m68OjtxDn4/Fp3GxsLPZXQUjkZ7e+Znw2Fx/abTGvF4MK6BAbxCITyvyaSxMWkaHPLhYVy/FStwv+JxOMM2m2g0Uw6MjeHZqK0t3dnQw+nEq6kJ452cBEl86FBxJOJcskUmBPTly0Y6GurLmtOd0VxEIn+OS2uYTLRa4cDN9bXgiSeQhfPWtyJjOR1eL86L5RqIsOkeHCS67LLU5gXTEYeMbdvQZXDDBjRU0d8TduI5syofduyAU7x8OcZSzpLygQHoBx08SHTvvchQkuXiCUS91k+ul6Zho3HXXTivYkhE0xYtTFgsQiImFMKzysFrjwfk0Fy3RWNj8M/Y5/r97/FefT2ygKuqQAq2t4s5ODKCTXFbG/wEzuYKh0E8Hj0KG9PRIbSR+TqMjaFbqduNTB19oDoYFD5VsRmbHICRZewN9LauuhrnwwQMN17JtkaFwyiPPHgQgZLXvlaUpBvN1k4fF69zuV4s+bFrF8qwiyERTVuUH6qKeynL8BkcjvkTgCTCM1Jbizno84mGmC0tSHoYH8/UILXb8RmfD/59qQ19SiUS2T+02TDXAgHs1TZvBil2xhmZnx0bw9xdulRkC27bhkzhs87K1IdOJpFYYrdn78Ts9SKwqihY/9vbRfad34/9SUODMY35wUFUiTidIBBranCOzzyDcZ95ZvHNLhMJNHt5+mloKb7znbCxFRW4VjMRXGV7/b3vEf3sZ5BzyAXT5pQPxz2JaBDfS/v5KSL6DyLqIKLdkiTVEdGlRPR1IqqQJElPVTxGRJ8gogtJsNlZcf3188vwzzUkkzDcdnumToymYbFRFCw8RhaIRAKOj9WKjn0rV8JRGxvDgsClH3V1xu7b6tVw4Lj7shEikT/X1QUC8cQThTaPqmZ2VZ4Ow8M4//b2zIh1IiFIvZYWYw7lunUw/Hv3YoE69dT8USlNQ9dWqxWLEC+E8TiI0uXLRTOXUqFpGF99Pb7HqEZjqfD5ZvTws2KLTAAc2ZckbLSM2meLJbc+4nREIhFsEzuz/P1zmUzcvx8Zb696FdGll2b+ngmJujqxwd62DTbtrLNAOhohDhk7dxJ97GOwFd/9bm
rAI5EQTa6M2JDnnkO3xNWrMfZyrr8vvABSIRaDM3722WKTl239SScDeROuf2UDC4fzM7JvHwjWsTFoKc0gTFt0HKKiAuvx0BDmVm2tyOCf6wiHITfDjQOOHoWfdtFF8AGiURCMtbXCb5qaQnCVswrHxuBrsU0KheA7rF6dubmdmkKpX0UF5rqeKIxG4Rc5ncXrhWkaxpNIwLfKVknDTV7CYaFHWFOT2lBwbAxSE14vuteffDLGl4tAzEcO5ioLtFqFLbLbcR337YPtet3rirsGBrFgbVEiIaqGHI75q83tdOK5CgTwXHo8eIY4S9HpzNzreDw4f78f512qBEm2ZitGoCcSrVaMc3wcWs+VlfAt0udYLIZkCtZnJ8L+8Je/RMLI29+eKTXFzTw3bswcWyAAAjEcRinz4sXinAIBHNvtzmwwlQ39/aiMrK4Ggeh24/u3bYN9Oe00BFSKgd+PTMn9+1GhccklCLba7SAQZ2qdmZyEX/TCC0TnnluWQy5Ym1MI5oHbMCvoT/t56pV/uWhrNRFJRPTJV17ZkDfp1yQQS0NFBRyiSASGSL+gcInF1BSMGEe6csHng8FWFDifzc1wNCcm4JxxhGdqCsapri6/8ZMkOKIHDqC8hgnIfKipAXm4Zw8iuieeiMgV62Jw9+bpzsfrFU1A0h1ajnRbLIV1gZYkLDKtrTj+kSN4LxdZp2kgDkZGQABwx+pkUpRVr1xZPgJxaAiZAM3NxgnbciBbWWYZMSu2aKFD00Rkn21JIZkaemeUS1fTf8/6frmOa7OJzEQWSWfh8bmibzQ5iaYmLS1E73tf5riSSdgGh0PMi/37Ec1euxbBGP1mNN95HTgAvcXmZmTV6DcV3GiAReTz4Zln4FCuXQtHtpwE7SOPEH3gA7guf/4z7B2T0aqKTV8+gpD1ljhrkf+f/mJoGjYg3/gGNl4//SmI3RmEaYuOI2gaNqB+P+ZSVRXmV3393LE300GWMad37YIP4fGAuOcyQZZyqayEz0KEzTV3OOWNv9UqqjF8Pvx/0aJMAjEQQHMkIhCIer+KGz9UVhavF8b61PE4/LZ8DUpcLvzN5CTGzVmJAwPIviEiuvVW+K4+n7Avfn8mQZgOvb5YZaX4v/6V3rhgcJDopZfw/nnniWs+Q1hwtogrJLjiweGYm0HGQuB2i+xnq1UEMbj7d7bu0vX1ws9oaSmdhOJrmCsIPN3n9Gv5li34/8UXiw72PDZVBalnsWBPKEnYX95/P/ZPH/hA5vf292Ourl6dmdUciUATemwMGoi8tyLCtenpwfevXJlfE7Cnh+hPf8J1vfZaEaR94QWM4aSTkOhRDAYGME6fD4Te+vV4z+EAgThTHMgLL6ChXSAASZkbbsj/GQNYcDanGJgkIpBLblVK+/cHRPRQjr/tK+uITGSF04lFKBwWmRkMiwWOns8HY6yP1DI0TWj1VVWBsGPNG68XhpW7GdtsIAGnpvCqrc2fhm2xgJTct0+UKBvJJPR4EH3avRtO8oYNcGwnJ/Hi7s3ZNFACASwu1dXZCT7WMmxtLcyIR6Ni4V6zBotPXx++L31BUFWQoGNjWIBYKFiW8TlFwQJXDg0XTcP9C4UwtlJEf+cgTFs0w1AU0dXQaEZbNvAmLZs+IlFmx+Zcx2Dnk7s4M7F5rMlEWQaBmEwSfeQjmcQdb4KJhN0ZGiL661+xkeXMFKPn0N9P9KEPYaPxwx+mBgYUBY4061VOB00jeuopaJhu3Eh0wQXFX8ds2YPf/jbRV7+KzOwf/Qh21+tNzUDkTTkLoWcjBwsZk98P/aMnn4RO0Ve+MiuBE9MWHSfQk4d2u+iUylqIemH+uYhQCNpau3aBsD/zTDRU0dvuo0dxfkuW4O8nJ0WlyerVouSYfRAm4hoaMm1KJIIMxHgcuoLptsjrFU0jirUtExPwsYx2v+aAlcsldByffBL2tq2N6D3vwZj6+7F2OJ1Cf5eJQNZ8zEYQGoWi4D4cPoyxn3barHRoXlC2KB7Hi7U552JzrmJRUyN0WG02zOHmZvj0o6PY96RrTTc24nder/GmjtNBTyQSFU4kbtmCpIpLL0XmXzgMYtTphJ/E+qudnTi/UAi+lMWCIGm6nJTXC9+ptTWz2isex3rf30/0n/+JCjGGoiBIoqog7Ox2kUWcrRP1gQNo+tbcjIZT7NO9/DL2q+vXZ5erMYKXXya6+26c71134TsGBnCuy5bNDAGuqkQ//jH8sOZmXOMyBlYXlM0pFguFRDTQr2da9PIxNE37R+nDMVEKWBw8FMrU8OE0+UAARl1P4MXjMKKBQKpW39GjIAkbGjKjqTYbSComEuvq8i/oVitIt717EY1asyaz/DobXK5UIvHEE/HdTicW0MHBzEzDaBTjdzpFyrwePp9wVAsh8BIJ0T2cu4x2dqIkenAQC+SqVVggVBUliF4v/ob1OBQFGYjJJK51ORxNVcXCFIkIfaN5BtMWHUNwaVCh5cu5wI1WOKJdDJFIhM9VVsK2MJnIGQjHagPxm98gAHDzzdltCzcUaG7Guft8yNCrroZgeCEZAyMjRB98RYL6Bz9ItcOsd8v3bLoNhKahwUlXF9Epp+Qua+HmJPleeiQScOJ//3uiN74RWo18byoqcP+KIQjzYdcudH4eG0OZ97vfXbaSINMWHefgkttEAs9nU1PqOsyVHbFYebuVlwvxODRNN2+GD3T22USXX57pS4yNYRPudoug6dAQbNGGDZkkaTQKEs7tzqwWicVAWAYColspg8k7VRV2rxh4vbjudXXw+9jeT6c9qM8w0jSirVuhVdveDq1al0toz3HTh2xrUikIBom2b8e1OeEEEA5lIgdMW0S416zPzBn38yFLuBBIEp77iQnRNMVmg23iZI50bXPeh01MwO8oR+KAvkkekfG53NMDvb2NG7G3s1hER3e2K0wI1tdjbt9/P8Z9662Z5xaNQmfd48lszCLLaCq3fz98gFNOEb/TNBB/sRj2XWwTJUmUa+sDBF1dRI8/DntxzTUiANPVhaSX1atxTsXgiSdQObJkCRrLWa3YJ7rdqKqbCQJxYgLf9dxzWBc++9mCm8CYNqcMWCgkYuiVfw0Ul2ZC07RxSZL+QUTvekUo84D+95IkVRNRXNO0HP1uTZQTkgSD6/fDqUkv32XNxFAIht3lggE/cACGt7MTCxaXxPr9+DmXAbJaMzMS82Uv2WyCSDxwANEjIyRaVRUi7Lt2iYzE6moY59FROMjRKMaqKKJRyZIlmc5GNIqNvdtdWKaBosAplyR8Dy8AkoQFiLshdnVBl4NJ2HXrBNmgqshajMWwMBYrOp4+roEBnFc23cd5AtMWHQOUWr48HbgkmfVy0mGktFn/t5WVIjORCUV9qcxsYOtWdEd+/euRaaIHNyXgUkCXCyTFww/jd9dcU1jAwOtFeU8ohA7QnMXM38X6ZS7X9M6oqqKZy759iEaffrrIOM3WnCQd05UX+/3I9Nm6FUTipz8t7jmXpZcbqkr0i18QfetbsMM//CHuRRk3laYtOk4Rj2N+xuN4PnNluzkcsDGRyNzqFp9MI
pi6YwfOw2YDUaZv3BGPY9yTkwhWOp3YtHNlSV0dqh/Sz1uW8XvOgEr/3gcfhJ917bVCd4zh9YoGc0aCO9mak7D0DDe4CQQyP6cvL7bbU7MGVRWyBi++iAYqb34zgrvsC8+UvuWRI8g2Yr3r9EYYJWJB2yJNw/OcSGC9qaqaHxqlxcJiEaQgE4kuF55dvx/nn56tx5qJrItfjj0F2zuuJslHdk1OCuL+/PMFCWmxiD3nwYOwLYsX4/f/+78g+9773ky9QkWBv2KxCEKSoaoIVL70EgK56U2L+vthR5YvT90LcQMZvbbp7t0IrnZ0EF11lbBdBw9in7lsGSorCoWqEj3wAILHp55KdNttsMlHj8I3XLp0Zkjw7duhD+nzIfh8ww1FJaksaJtTLhzHZioFLxAY489IklRLRFEieq7AY9xEaP/9oiRJPyaivURUS0TriegaItpARIfLNWAT08NqFU4YE4V6sIhvOAyibXwcf7NmDX7HJbGBAJyhfF19rVaRkejzwWjny+yrrESkdu9eLBTr1hmL9jscIBJ370aJ8Lp1QqdxagpOaDSKDbLFkl1rQpZFJmEhZW8s9K0oyPTL5sh4PCA3DxzAgmqxQNicCURNQ6lLOIyxlaNUSlHgxMbjWJznevnVNDBt0SyjXOXL00HfsbnQRivZYLFgrKoqmokkk7NDJh45AsdwzRpsUIlSSTdFSc1S1jSiP/4RdvHaa43pwDICAaKbboLNuf9+BHj0YE0ol0tsoLO9EgnYot5eole/GjaTu84SZTYnKaS8uKcH53XkCEpn3vIW3IuZJBB9PqI77kBZ9nnnoZS5zJt2ItMWHXdIJkX1AfssLtf0Nkdf2VFdfWwzn1QV/tILL2Az2t6O576hAfM6FoNfEYnAx2FJhfp6+CR2OwKN8Tg2sOl+ITet0zRkCuk37YpC9NBD2ABfdVVmVpDPh++vrYWPVkxzkkBAXOf6+uylxdOVFweDKNnr7UWwhjswNzTgPkajCIzX1ZXezVZ/XXbuBGnR2IgAzQyULy9YW8Rav5qGNdVuP/6yD7OBswu583hDA17RKPyBJUsy19aaGqz1U1Mi+79U8HdwaXOuuZdIED36KMZ9xRX4fg5IMpHI+n/Ll8NO/etfILyuvDJ7me2hQzjfDRtSz0XToHP67LNEN94I3UU9RkaQVNLWNn3yiyTh+596CpmGV14p/Me+PgQiFi+GdnWhz1wsRvT1ryMT8LLLoJnt9eLe1dTguOV+jhUFgeaf/hTn/dWvonlfkdU6C9bmlBMLgkTUNK1HkqSbCeKXPyIiKxHdWMQxTiGiO4joaiL6MEFo8xAR3UlEI2UdtIm8sNtTGxKkk3oVFYi0TE2h9LazU5QXDgzAmWttNU6ycaMUnw+v2tr8RKLdLjIS9+/H5tbIwme3i9Lmri6QkfX1ogPqjh1YpNhx1oOJQKLC9UNY6Lu5efpxahquX1UVFgxuSFNVBWczGIQTUI5sQVnGBj6RwMJkpDR8rsK0RbMLLl/m6P5MZdro9RFzNVohKoxIJMJxeMOaTOJ8ODNxJs4lHEaXT7cb5B6X/DAkCY6ipgnb8re/YX6+/vVCysAIIhGiW26BM3vvvbBlTM5xCXM0KsjUXM1JOANxYIDowgvRmTRXc5JC8eyzoovio48SveY1M08g7thBdPvt2FjdeitKKtOzMsoB0xYdP5BlEIHhMJ732loE2ozYGUkC2RYMYs6VI8OnUHBp3vbtOI+2NjRDGhyEvVm0COSeogiduLo6kHLV1SAM7Xb4PZOTIB6zBTMmJoS+tN6/UVU0Gzh8GHZs5UqsG0wG+v3wIx0OId2QjnzNScJh3Kfm5vxB62wYHoadDASgHXvKKbhfyaRobCXLOEevVzReKUUOIxDAPQkG4ceuWTMzBNdCtEVcHZFM4vngZ2shgcvvOTmjrg5zd3AQc5n16fVoaBBNMNMDAcXCCJH4t79hnNdcI/Yg7PNxVZvPh2zDlhaip5+Gz3D66dk7lw8NYZ4uW5a6T9I0op/9DJUgb3kLiD89pqawx6qrg92bDs89h2ZUnZ2wa3xeg4P4XUsLMhwLndNeL3Qae3tRRXL55aJSrrY2U9eyHBgfR/bhCy+AOPzQh0CMFnv/F6LNmQlIWr5WPibKCfNizwACAThPNTViMfB6EeVh7ZraWhhdScKGNxxGlLuQrBmGpsGQJ5P4TiPZheEwshE5O9GoY5dMIhsxHIYD19iIKNTEBDaxNhuc/pYWYUwnJkTTkUIixtzZur5++ig26xRFo8iYtNuxAWBnyGrFIlKMo5yOZBL3iwXTZ2IzXSTme6z4uLZFM1m+PB2mE7XmcXFZczHjURQQiZqG+c46fOWApqHzb1cXSna5A6B+nIEAbGtDA2zE9u0QGT/jDOjSZEO2zMFYjOgTn0Ak/M47M7ULmfxlaYpcnYuTSWz+BwdBOqxfX55rQYTGCjffDCf/979HdgETiJzNWE6oKiLs992HzdEXvwhC1EDQybRFCxSqKrIIiUAcVlcX92yyNqLbPTPZ2rlw5AjRtm3wWzjjsLkZ2W+9vdDea2kRJY5OJ84vGETgoL4e88Xnm35zHQyCiPN48DdMEMoytMK6umDDTjwx9XOxmCg/bmoqPHuQCPfH68X4szW/y4e9e6EVW1mJwMuyZakEYnogmb9P0+CjZmsymA/9/bgHNhuyqAzqjZm2yAB4fdM03LtyNByczwgG8fJ4hKb9+DjsQTbd80QCJKPdXtx8ygXObk6fzy++CD/nnHOyZxSGQpgrdXVIFOnpgQzJkiXIJKyshP3gLEC/H/u6hgbs6/T47W/he1x2GUqg9fM2HIYtcDrxPdPZnC1b4J+tXw8SkzMmR0chy1Jfj6ZzhVa29PYSffnLGMunPoXrMTwsGpNm088uFf/+N0jLQACly1dcAZufx6bNd1s0L2CSiLML82LPANiRliQ40IcPI2rtdsNAV1TACeS/SyRAcpWSJadpOGYige80QtYFg8hGdDpBJBqNOsoyHNxgEAumqmLxaW7GGLxeLAQtLXAqvV4svIU0HQkG8TmPZ/rMzFgMBGI8TnTSSYKElWUY+v5+kA+nnVZ62WUigQ2GomBTMAsdAAvBfF+gjltbxOLk7KDP5oaYv1/Tcj//nFFXCgkly5jr7PCWQmqxC/DII3i9852QJ0hHIiGaOLW0IMv7kUewyb/00tzNStKhKCDInn4aJbuXX55KDKoq7l9FxfRZUfE4vn9kBE5yujNeLFQV3QXvuQfk5i9/ibWCr/dMEIheL4TB//1vZFN+4hNwkg3aUNMWLTCoKtbsQADPpNudGkQtFoEA5md6w7qZwOgonnf21U48EZlHsRh+190NX2LjRtgc/YZRlrFRr6gQpYMHD8LeL1uWqUUYiSDzp7Iyc/O5dSsqPs48E690/UGvF99TbFfYSASEiMNR3DG2bCH69a9xbT7yEfhcXM7tdOZe3xQFWZmhEMbf2Ggs4C3L0D4cGIC/+apXFdR0x7RF04DXNm6a5nDM/DybL5iawrWpq8NzPTKC53zRouwkaziM57u6urz6
6Ewk2myYqwMDkDlYtQq+SjpUFbqCsgwb5vMRfe1rCBh86lO4x6EQ/o4DIC+/jONv2pRqsx99FIHECy9EsEBvKxIJ7AOJULWRKxFF09CIascOHP/ii8VxxsagjVhdjaBroeT19u04N7cbEivLlglN/MbGzMakpUKWoQX9q1/h2DfdhAxsg4115rstmhcwScTZhXmxZwiyDMfz8GEY0fb21Lby8TgMvaKAwCvHoqNpICXjcZBvRrLkfD44u0xwGnUgFIXo+eeRXbl+fWqXrlhMLLiyjA1+IfpZ7LTnc3KjUUTkZBkZMvpryBodmobNdmUlNgDFlh4nEiAkNQ0E4hzsHDnfF6jj0hbpy5ePVXkQd2vm8rZsKAeRSCQar7DTW1FhbJOavuzv2oXI+ZlnQtuGSHQF5Q354CC+q7UVzujDDyPgoO/ErG9Oku1FhMzDRx9Fye7b3546DtaxtVim13KLRon+8AdkL73hDXDwy4FYDI7qQw+BTP3GN0TH7JkiEJ9/nugzn8Facsst6PxcYDaZaYsWCFhGxO/HvGQ5kXJ1cFcUEIlWa/k09dIxNYXMw54ePONr14oOntwQYN8+ob/H9lT/OnwY12HxYlwHPtby5am2iDPCR0bwb0dHaqnxs8+CRDzttMzACTeYIyq+E3M0KvRjW1oKIxBVFXbo738HafCBD2DTb4RATB/DxAQ+U10NkiaXbfH7QRRw5UtnZ8Gkp2mLciAex0uS4JuUa84eL9A00biooUF0+JUkzPNsz+zkpCjbL2eSAftUsRgILKcTvkq2+dbTAzuxbh1sz3//N8Z0++0ie5cbxMViokLu5JNTx/zPfxJ997uQTPnEJ1LtDdvEWAzfk2uvqWkou969G7bz/PPF73w+fEdlJTIQuWGTET9D0+Cz/eQn2Nd97nNIUuES7ubmgjsj58XICILNu3YhQ/y66+DnFbCnnO+2aF5gQWgimjj+4fPBOMfjINn0XfVkGdEkjsaWq6xRkkQ3sWBQdBCdDrW1MMLd3RjvCScYGw+XTjc347uGhhChI4JDsmgR0umjUSzA2Ro85Dru2BiuTVNT7rGEw4hsqSq6cOkbm4yPg0Csr0f6fiiEhXXfPvxcaHQqFkMGIjv9C73Uw0R+HKvy5WzgzSuTcNmctOn0EwsBN1rhzEQ+/2xkIhOH+g7FXN7y3e/Ctrzxjdjkp5OM3MyptRXX+fHHYcve+tbUcuPprrmmwcF+9FGUCacTiOxoE8FJns4WPfQQyI4rr0zt5lwKxscxpu3bib70JaL/+A+8P1MEoqoiyv7DH8J+f/Wr2Li73QtDWN9EYQiH4WvIstDAK3eWtdWKuRcOY76Xc2MeCoG027MHtvGEE/Cy20VJrqZh/nEw9OjRTFs0NQXyoK0Nn+nvh991wglC91a/OeZMx0WLUoOR27eDQNy0KZNAZEKD5XCKIRDjcdiUYrIY43Fs2F9+GVlJb3kLPh8O49pVVRknoZxOnPvUFGwmky7pRERfHzbslZXYtJdDjsaEqIxgGYxj6ZvMZUhSZsfm5mbM34mJ7CRVXR3WZ9ZBLVfTOZtNVDokk5h/2WwtNxJZtAg26DvfwXsf+Ujq/GHd2SNHsFft7Eydv88+CxmTk06CDrLe3mga9lPhMD6Xi0BUVaK//hXVbq95DXQDGcEgshNtNtgTl0sEZFjfORcUBT7KY4/huLfeKhpYcWPScpaUE0HH8b/+C0kB73sfgtvLls3JZJIFD5NENDGvoapwfoaHsaCwoCt3Mk0mEbWWZYiwsiOWraNzMZAkOPOsS8SlRdOBSb6+PpCJq1ZN71Qkk4IEveACfKa3F+e+ZIlweJua4DCyVlBr6/RGV1FAIEgSFujpOgK+9BL+f+qpqec3OYlFvrZWELduN4jcvj4smoEAxIaNLPDRKMZusSA7YbZLUU3MP+jLl+dKhF/faIUo+9ySpFSNxFK+ixutxOO4FpzNx41R9MShHokEHF9NI/rgB+GgpmcPxmI4/tKlmNu//jXm8tveVpim7Pe+B62fd75TZDsyNA1j5kDMdLbooYfwt298Y2GNXKbD/v3oRD0+jvLlK68UWdUzQSBOTCD7cPt2lIJ/9KOwoceiqYWJuY1oFBtPzvBvbp7ZzZTdju9iSYFiNub67MGpKWySd+6EH7Z8OfyD6mphr7mR0t69mOOnnio6sutfiQT8iVWr4Ov19eEYK1Zk97smJ3HcpqbUa7ZzJ9GTT4K0v/TS7J9LJIpvTMKabTbb9L5VNvh8COoMDMDGXnihsI+FEogMi0V0cJ6YgN/ncuE9TYN/NzgIQuDUU83AbTmgaViPEwnR2K1cJNfxCoslk0isq4MNqarKnOOSlKoTX2i273R45hnMkze8IbufE49jH+Z2Y+/zq1+hyuzd7wbZlx4k5qZPq1dj3gWDsEn796PiobMT2Yvpc/vIEZz/smW5JaoUBcHZ7m5IsLz61eJ3kQhsnaZh/8g+Bmdpc9M+Lt/WIxKBrMuOHURXX030rnfhOP392O+2tRlvTGoEySTR979P9OCD8DdvvBHrRUeHOXfmKszbYmLeIholOnAAxmzRIpGRwpmBVVUwwKoKI8QRHNbI4Y5o5UBNjSAoNS01Uy8bmpsxjiNH4AivWJH971QVzp2q4vy4McvBPNjsxQAAIABJREFUgyBHWb8oFsNiytpIIyP4XEND9gWQOzgryvTaW4EAFhCrFc6lPgrm88HR9Xhg8PULkM2GxXJ0FOe4Z0/+VPRIBMezWnG/5gIZZGJuI7378lzSF9ITeNkaqRTSsTmb1mD6iwlJVYUzpiiirLuyEnPSYhFZOpIE/R2vF9HlbDZIlvF7ux125ZFH4Ahfc01hzuPPf070ox/BEb311sxzZZ2o6Tpo+/1ocJJI4DjZOjcWg82bia6/Ht/92GOQimACkQjXrpyZI9u2oXFNOIx/L7kE321G2U3oEY9jjY3HMXezZY/NFFwuzP1QSPg2jHS9wWyvaBS+w65dqEiQZWQKnn46NvpcSqd/9ffDLp11Vmolif57OZja1oZqjGAQgYRsfkUkAlvl8aRKr+zfj3m+YgUE+tPntt+P8dfWFjcnubrDYik8i/HIERCI0SikDU48MZVA1DdnKAZ2O6R+/H48WyMjIEKIQOxyoN1EaZBl+OSqivXDbjevq1HYbCASvV6QZ3V1QhbA4ch8/q1W+CLj45jv5SC19u6F7TrtNMwJruzS+2yHDuHf1ashOfDssyAcTz8df6OvNolGkU1YUyP8rEgEwYx77sF+5447Mu3N6CjmaGtrbokqbi7X14eAQ7rU1ZNP4m8uuihTooL9QG4wxf4hEWzYl74EO3vLLUSvfS3Opb8f9mjRouIak+bC0aPovrxvH3yiK67AmperlN3E3IBJIpqYlxgfR9RFkqARoRda9XhgAHt74XQtX55qnD0eGM1gUDQlKAeqqzGeSASLSz5NobY2jGNoCAtjekdBTcPvYjH8jqPDkgSH3GIBmehwwAFkR9puh2M9NibKEFtaUp3ZiQlsTpqbc0edfT5EqCsrsTDpS5sCATi8LhfIzVwOUks
LxtXdjcVh8eLsm/9wWGwQzKiTiXzQly9XVMxdJ91qxRhZSD0beFOuJwHzNSchSs0WZIKQXzzX+bs5W1E/hiefhON71VVoXpAN4+MYU1MTyLa+PjiTHR3Gr8HDDxN985tofnLHHZn3KRYTXUZz2eLJSWQgKgoIzHLp7/zsZ0Qf/zgykh58EPZJTyAa1Zg0AkVBlP0nP8Ga9I1v4Dq6XGbGtQmBZBJrbzSKeVxfP70+aLnA2YN6e+T3Y613OgVBmE1GnTulJxIY96FDIOtY/uT886cveZuYgG1pa8tOIBKh2iSRgL8xOQnfprk5u8h+MonNt92eait6erDhXrIEWq7pBF84DL/Q7S5Oz5m1uYnwvYX4MTt3Ev34x7jXt98u9B71Gdrl8Iu4emZ0FBrXkgSyZPnyubmGziewX5JMCl3fY6HLPN9RWQnCzeeD/WluRlLE6ChI8PTn1OEQ0lKVlfmTOKbD+Di0AxcvRjdmScr04TiAsWoVCMdHHsEc0jde4WoUbohis6VqjI6OQoe6oYHoYx/L9AF8PiSK1NZm7zZPhGP/4Q/YO73udamd5RMJ+GyRCDIQcxF+kiS0aDkY3dODbsjJJHQJN23C7/v7Yd8XLy6scWc+/OtfRHffjf9/5CPY0zc3F6btb+LYwNyqm5hXUFWQgyMjIOk6OzNJsGRSCEnnKv2pqUGUy++HcS3XQu/xpGYk5mvgsngxjPPwMMbAOodEIAFDIUSh0svcJAnO9MgInOn0CJzFgs/5/bgWAwMwyE4nFqdwGOedK7NhchJ6PE4nCET9NQ6Hsbg5HHA880WJXC4Ig/f1CR2NlSvFghwKwUGorMRiaRKIJqbDXCxfzgY9IZhIiLLl9OxBotSyZ31zEi6jTX9l2+xle4+1GXmDz6Tr4cMov9m4EWXB2eDzYUPU1ISo/Msvw1HORThmw+OPE335y9DY+spXMm0FZ5JytkY2jI+DiJQkomuvLU+mgaIQff7zKOW+5BKiBx6A7dY0jIlJ13JtqkdHUb68Yweu94c+BNvqdpv2zgQgy1ivWYqgtlb4E6XCSPZgtmAFa61yprc+c5A3yLEYNqqKgvV93z7M6XXroGWVbyMYjaIRgMeDz2SD349XUxPGwxIq2QKSmga/iCi1E/PAADbczc2QLkhfN2Ix+ISsN1koWB5G03DORtclTQNpwSV8t9wCvzGdQCyXj5pMwg4dPYrgyQknwAcbGsJ5p2eemjCGZBLPkKZhLTNLwktDVZVI9rDZMPdHRzFHswUOqqthj3y+6f2J6RCLoSzY4SC67DLhr7DutCwLyaWmJsybBx5AduENN2SvNunuhj088URhE4aG0GCuuhr+kdOJYzkc+H8kgs9VVeWWu4rHUZkxMoKxrl0rfifLRE89hb3WeecZ0yzkTMstW4juvRd+1l13IeDC8lvxOH4uV9OtRAKZ13/4A+zQe94D+1PuLEcTMwfTfTUxbxCNIrodDoN86+jIXhbX3y+y8RQFhi99QeGmKD4fnNPa2vKlTLMwfiiEnzlDMRc6OkT3U6sVji+Lh9fXZzemqgqScdkyONLDw3gvvVFLTQ0WppERLFyVlfg7LnvOhvFxOPUuFzqI6SNk0ShI3MpKLJxGHVurFYvh2Bjuz549IBI1Dc6sw4H7ZUZtTUyHdI2hY1XmYLS8OP3vbTa8OAM6vXOxPoswHdkygIxs9njTryi4duPjiIDX1UEHMZeDOjUFGzA6ioj26tXQ2zGKp58m+uxnEYT42tcyN9XskNtsuRs4jIzAwayoQAZiORzLSITove8l+stf0PH07rsxBi4DLzeB+MwzuA7JJDYO55yD++HxmGU6JkRHZL2/UEh37nzk4HTZg2wb9B2L04nCYFB09pUkzNlgUFRcWCwIVHZ14f2mJgjwG9ErVRRk4BEh2yXbOScS8G+qqvDq7cW/uY4/Ngb71d4ubM7ICEi6mho0ScgWeJ6cxN9nIyjygf0xRQGBaDSzWFWJfvMbbPhPOQWbaPbRZoJAnJqCDms0isAuExS1taJ8lBuvmCSYMaiqqIqwWrGWmXa9PPB4cF0DAaz9Hg+eUaczu89QXw9/xevNrL7KB02DzEEwCBuhT7DgbL1YDEEShwP28Gtfg0256absQYOBAewxV60SFXBeL8p2LRb4Ay0t+O5oFMePRkHYWa1Iksl2DtEo7NnEBPSbV68Wv1NV+F5eL6QhjDa21DRUe/ziFwgu3H47rrksYzyJBIIcpWR56jE4iEDuoUOQp7noIlxjrs4wMT9gkogm5gXGxpBibbGgdDfbZjISAUFls4Fcq6gQzrnVml1Lo7palOyUMwLL5UfctTnfsZcvT00Xj8VgrHNF8Scm4Phy8xS7HdlFqooFQP9dTKgODGAxqK7OXTI0OgqCz+MBgahfGGMxOPA2W2omYSFobhblzc8/L/QPly41HS8TucFOlqLMbPlyrnLifOXF+uzB9NJiPUmoqqnaOulIF+MuljjMBiYLfvIT2MRbbhHkmX7u8YaYS1z+/GfYmTe8wfh3v/gi0Sc/CSf43nszs8G5kQJ3g82GoSGUCVVVgUAsR/T76FF0lN69m+h//gckKo+n3ASi/P/Yu+74tqqz/R7JlmR5z+xBFiEDCIQAZZQdNoRQRqCsAmX2o7SlUAqltFBWgY+ySoFCm7ILlBGglECAsgIJmSbbdhxvy9rrSrrfH0/e71zJ98qSLA8len4//xLLV3ece8573vd5VwSRjs8+C+fO7bdD/hUW5jsw54E55/FA9+CGbOXl0mjUNidJ9qMHJgG5G6weQZjq/LPZQOJ1d0MmqCq+X1KCe//mG5B05eVIqZs8OfVz19djDObM0ScFuKQLEcjJhgbcg1EGBNfDrqqScqWri+jFF3H+c87pLW+iURzDTRrS1UO4vrSiJC8Pk4hAgOjPf0Y65PHHI72a65P5fDgmmwTi5s0gem02OIO0ZKnZjHv3+zEWLS2Qt5WVeb0sGTgSVwhZeziP7KKiAmvC6cR8DAax3saN6z03TSasYSYSa2tTl0Vffgn5ctRR+hHOQsA+C4dx7ccfxx5//fX6pQ96elDyqa4O51NVOCpuvRVr/4475HWEkE7xFSvwjPvtpz+ffD6il16CrFuwALKQoaooUdPWhtqMqTaei0QQEbh0KUpPXHutbNTX0IDzTpyYPXLvgw+I7r4b+9PNN8MetVhgC+adF7mFPImYx7BGNAriqr0dSuqee+oLVq83vqYek1+lpdh8PB5sRokbSmEhjmGyMVteFiJsCkLg3E6n/vUZQkD59vtBrk2dimfVg8uF4yorpXHOG+rWrVBK99orfoPlumjsnePuzVrFvbUV3y0vJ9p333iSMByWBbgnTepfCqndjiiB1laMOadC5ZXVPPTARcqJ+pe+nG70IKOv9OJ0DHImBoy+YzLJ1ELt37NFOL3yCjy/l12GdcxjywSjEFC+IxHIwldewZgvWJD6uK9bByV09Gh0ZE5UPLlRAJGUkYlobERaUVkZvNSZ1CdLxKpVIBBdLijhxx2HzweCQGxrI7rhBqSBn3km0pe5yc1gNc
fIY3hCVaETOByYd1arTFvu6UmeXqxtjMTkIH+mbZzUX0Qi2Jd9PsiHcBg/lZUw0n0+aXTb7UiZS9Q5+kJzM3SAyZNxTj10dsLgHjUKx6sqjGY9B2YwCALMbpcEmdOJjvAmEwjERP1OVSHvYjEQDumOHROIoVDvDtDJ0N0NB0NbG7rVH3ooPmcCUYjkXerTQTgMYqK1FeO4//7Gstxuh0Hf04M56vfj3RhFiu+uiEYx37hOXlFR3ik0UBAC67mzE+uZuzF3dOhH2XE0cXe3zDLrC9u2EX3+Ocop7LOP/jEdHVgXY8dCprS2opah3j0Eg6hXX1wM+UaEdX377TjHbbfp15Xevh3/cpZWINC7Fv1LL+FcCxfGk4SqCpm8fTsISKNmnYnweIj+8AcEj5x7LuSkEJAbjY3YC7SNSfuDUAhO5TfeQCT0NddA9trtuEY+Ey33kCcR8xi28PuRvuz3Q1gmdgBmeDwQnFZr76YcQkiS0OPRj2axWiHsfb7kkTGZgJULl0t2GksWhVRUhPvllKHElGOu21Nc3PtvY8ZA6WSP84wZeB6OLBICYfWxmExvrqrCT3MzxrqqCpuoVpgrCqJAYzF8v7+eIocDCsGMGTgXd2+ePDl7tTby2DWgTV82ShPKNHqQKJ4ITKf+YKbgRisckcj3r8VAkIdEcE688w7SRg45BJ9xrR9FkRHQHg9k0JIlGPtFi1L3QG/ZQnTVVZBzf/6zvgLv88mSCnrvc+tWpBpXVYG8zIY8fucdoosvxjnffx8KLFE8gZitKJKPPkK0QTSKVOmDD5adVfNe9l0bLIuMogY9Hux/0SjmQlkZ5ABH5TIZqNe9OB1nRSZQFKxNvx9ylwhroqICc1dRsG8vW4Z1brEQHXQQaqSmm5XgckHfqKmJj6TRwucDKcj1q0Oh3k3yGNEodJqCAmnUe70w9iMRovPO089ecTgg46qrM1v/XV2QmdXVqcupbdsQ9RONgoSYPh2fM3GbTQLR4UD6ciiE98SERjKYTHie4mI8X1sbZHVVVd7IV1WpkwgBnWS41mTelcBzsqsL67qiQhLdejaD3Y535PFgXSdbmy4X9IPaWuhGeuAU47IyRPpt2ADCbcqU3sfGYpBtRFjbJhO+/7vfIcL3lluQmcAZICzTm5qwXidNQrAH62ORCNYiE4ihEGpDjx4df90VK3CPs2cbB6AkoqUFxGZHByIqjzgCn4dCOBcRntFi6d29OV00NkIv2roV8nj+fLzLykrYrnkSPjeRJxFThBDiNiL6DRFNVVV18xDfzi6P9nYoqmYzDD4jb5LLBTLMZjP2ZBQUQAhz1JvehsJFfJlIzKaxx0ovE4l69RdjMRChqopUk61b4cmaPl16zyMRKPGFhcbNBUaNwv1v2ABibsYMGVk0cqRU9seOxbkcDkQmOZ34++zZ8fcWieBeIhEooKl62o3Q1YXrlpXJLmulpSA+v/sOm4le97U8JHYHWcReWK4zVFgII5YJQY7UyTR6UJtiPNhgIpHTArX3zP9yp2bt5/1BSwvSmCdPhvKrvSZ3bfb7IXctFhBhHR3wdhtFCSWiuRnpwYWFIBD1Oihz1DE3aUjExo1oxlJbCwKxv/JGVdER+aabEF394ouSZIhGpWKcDUNQUYj+93+JFi9GVNadd0JBjsUg43ZFY3N3kEWMvpqT8N8TIQSMMr8fx5SVyY7L2tqDQ7HnhcPQeXw+2ZHcapXpwDxn/X6kLX/zDX6fN49o7tzM9KRwGFHBNht0O73njkbjazh7PNBZ9DJFuJFKNIpj2Gh/4QXc97nn6jcWcLlwXHl5ZpF2XV04f1VV6pHSX3+NjvAVFYjWZlmUbQJRVaUz2W6HTpluPVmbDfoY1w33+6F3ZiMqfCAw0LKIo/ZjMdm4I6+nDh4KCjCHu7tl7cmuLuM08vJyyBqudaq3/0YiyHgQguiUU/SdIbEYbCSTCUTfJ5+AADvsMCnvtbrMli2QpzNm4N4UBZF+mzejziA3puM9w2SCTdTaCvKQZYLdLvWyhgY4dYVAvcbEMldr1kh7kR2kfWHtWtyXEGh6x02tgkFcjyjeaaPt3pyuQ+vdd4n++EesmbvuwjN6vXgOPT0xj94YrrrWbkMiCiEuIaIyVVUfHOp7ycMY0SiEcEcHFK1p04w9xE4nFE0OhU6meFmt8YX89c5ZUiKLnFdUZNfos9l6RyRq77elBcJ73DgokdOnI7V4wwYYpHY7NhpVhdBN9qz89+++w4Y3fjzIRa3CbzJBgLe34xq1tfA4JdZG445ckyb1PyKoowMKQHl5vBetqAh1Lhsa8D49HlxvV60vk5dFgFHEYDgsDW6LBeuQ05mJBj96MFvQkp0mk1TI9NYy3382iMRgEKlzFgvR1VcbRw25XFiL69ZBaT7qKOPaqYno6ACBqChETz+t/71gEH83Sklfvx5RgqNHo4Nxf9d/JIKU4iefRPHxv/xFkgXZJhB37MC11q0DaXH11ZKU4Wiz4Yi8LAIGqjmJokCfKCgAAVNePvTp7NxN2efDGiCSjQLYcGWEw+jKvmoVjt17b+hkFRWZEYiqihR/RQERabT2Wlqks4Frmxk5Th0O6HUjRuCewmFE7PT0wODWq2/m80HPKC7OrISNw4FzcAftvsBNG15/HXrWVVdJMk5RZCQq19LuD8JhkJXt7SABE2tbpwMhoKtyVGJnJ4z/mprsd5UfrrJIVeXeZTJlt05lHunBasWaczohs8JhzPOxY/U7I1dX4+9dXZAPibrWBx9gTi9YYNxosqlJyso330Sq8OmnS2cvZ7mYzTLNevx4rJtoFM1X1qxB1PG8efK8nCXW0wM7q6Kid4qzxQJZ8+qr+F2PQPzuO5nJNWdOauO4dCl0wpEj0eSFictAAHaYyQQCUauDMXHIuhM7v5IhGCS6/37Ivn32IbrxRoxlIIAxMhrzXRHDVb71F7sNiUhElxDRWCLapV7grgSfDwIxEIAw1dsYGA4HPDclJfoFdvVgt0P4eb3xBcwZQkCRdjolkZhNZUG7AToc2GTMZmxyHg82B1YsCwtBHq5bB5Kvrg73zoX5+0JNDRTIb77BNcaP733Mpk3Y8PbeG/fS2op/q6qwOW7bhncxcWL/vc9tbZI81ashYjKBOCwrwybGm+Iuusns0rKoP+nFiiKbpxQX6zcpySUYRUmyXOHah3pyThuRmKlhqaog0drbQXIZdR7ldMHWVjQ7mDcPCqmiQO5wtKIenE7U/OvpAVGnlzIXDuP8HMGRiFWr0AF6/HhEBPSX2HO7UWts6VIo77fdJudOtgnEDz6AIk4Eb/shh4AUKCgwTtkeRtjlZdFQNCcJh7EeuN5oOtFq2QaTIJyqzPWRi4qgj+hFBUej2IO/+QbfnTIFMqGiQpKQwWD6kcKbNmFcZs0yJt96eqAPMYFYXq5PBBJBl+vpwTHcyfWVV6BvLFyoX3eMO8+zPpYuuMZ2WVlq+kkkQvT3v6Pm2oEHEl14oZSl2SYQu7tRtiIUQuS1Uap4urBY4NxxuzF2zc3Q5bhjd5Yw7GSRomCeqyrmS74cxdBDa8fZ7
ViL3d36GRNmM4jEjg7YXNpjVq2C4/Lgg2Hj6MHphE4UjUKuTJhAdNFFcs5r9TivF1lblZWwSVUV9f+++oro8stRNzYRoRAiFC0W/YZUra24bmkp0ckny+flaOUtW4hWroTedMABfY+dqhL94x9wsuy9N0g93pd8PtmYdI899HUjdtBrHWtGNvK2bUhfbmyEzPvBD+BsFSI7QSk5iGEn37KB3YlEzGMYo61Ndv6dPTu5ctbdjeNLSyGsU1ViOHWWlUC9jskmk6zB43Ilr2GYCSwWnLOnBz9EklBMNPAtFhCJX3+NDW///VMXvFyce84cjNXq1RhXqxWCf+NGpE+PHSvrZ3R24p7YQPD7sTn1t05hayvGvKrKuNs0o6YGG+TmzSBPR4/O18sYThio5iSc8seRh/1NYx1KpNpR2WyWypgRQccRi4kdlFPFu+9Cfpxzjqy9lYhAALLO4YChO3kyagOxsqgoIEWYTNQqjT4fomp27CB65BFEFCdCGwGulzb4zTeImJ40CR2g+xvh0tQEhXXTJtQeu+AC+bdsEojhMLzsL7wAUuTuu0FK+P2Yx9kgBfIwRl/pxUbNSZjE57IleuRgps7DSAR7nd+POcaRaoM9D1QV98A/LD+KijAvk9WX3bABhq/XC/3qwAPjU844Rc/v7y0PkqG9HQbluHHGpGAoJGsbejy4T6Na2OEwyAGbDXpDLIZIv6YmRB7r1SuLRKA/ckmYdN+L2w1ZWVKSWnqwz4dyChs34p5OOkleMxyWctGowVSqYJ2uvh7v94gjBsYBy9Gq3d0yGrOmZtfLGonFZD06s1k6EfIYHigrk+nlVivWpN2ubx9ZrdLm4hqKra0o2bLHHpBvelAU2CGRCNFbb2HNX3llb72BI87Xr8ffpk3DenziCaKPPyY6/3yiE07QP/+GDVj/06dLpzHLgeZmon/+E8909tm4b65b63ZjDX71FWTpwQf3LT/CYZRb+eQTomOPhd7Gc9rrhdwsLAShmkw3EgL3bJTerKpIvX7gAcii+++HTqntW7CryYvdGUPqIxdCXCSEUIUQxwshbhdCNAshfEKI94QQ43cec7UQYpMQIiiE+FoIsb/m+xOEEH8SQqwTQnh3/nwihDgh4ToNRHQIEU3YeT1VCKEmHHO6EOJjIYR753lWCiGu1bltuxDiISFEhxDCL4R4Rwih4+/MIxVEo4g+3LwZQnLOnOTKT2cnlMzy8vQIRIbJBKU+GoXg1IPZjPNHo9icso3CQmxqXi82EZvNmFwzmaCkcag8p8glQygka4VMmgTyMBwGkRgIQNHcvh3CnDcvIWAo1NUhEnDTJtxjunV0tFBVkAvcUa0vApHB6c21tUhrqq/H/Q8knnnmGRJC0Lvvvku33norjR07loqLi2n+/PnU1NRERESPPPIITZ06lWw2G+1qsogjdhQF8ycQkClfnILf3S3JdY9HpiUoilQkrFYoPSUlWM8VFbJ5T0UFPispwTHaWjaBgGwslIsEIkcMaglEXlfJZJQ2Oq6vY4yiN41QXw+P8wEHoIaPHqJRyFS3G8plbS083lpPu80mIzBCIdmVMhQi+slPIMPuuw810hIRi0lCRU/B//JLXHfaNFy3vwTi118THXkk5MZrr8UTiJFIfI3N/mD7dnjXX3iB6Ic/RAp3WRnGpKgIczxTUiAvizC/mGTxeiXJzfv/jh14x5yqxgYi1yxjYqa8HLKnthb7DzulRo7EZ1VVOKakBO+NU5LTRTSK+2tpkXX2Ro/OeqRWUnA0DJN1HR1Ye8XFePbx47G/G9Xc27YN83npUozdqaciKlivZhVH2Hq9+k6TRPh8yKooL8da14OqwnBmAsdshpGvd6/cHE4Imdnw5pvQI+fPl/W9Er/T1YX/V1en75TxeGRTO6PUai06OlBzbOtWoksvjZer2SQQQyE0e1i/HnP7yCOzRyDqyaLy8mK64IL5FAo1USRC9Ic/PEKTJ+86sojrhEaj2Pvy6cvDE5WV0iEtBNabkR7FOifvI2+9BTvw+OON197mzZCf//kP5sTVV+sHVHD90UiEaOpU3M8//gEH7oIFRGec0fs7sRhIf0VBEAc34GRSrqFBRiCee668bmEh/t/RgcyN0lJ0du9LljmdRDffDF3roovQFZnntMeD/cJiMY5A1AM721RVNgsMBFBf8a67YMP99a8IVGluxjoabmWq8hxU/zFcIhHvJKIgEd1DRKOJ6GdE9C8hxHNEdAERPUZEdiL6JRG9JoSYrKqqQkQHENExRPQaETUQUQURnU9EbwshjlVV9YOd57+OiO4moioi+mnixYUQPyeie4lo9c7jHEQ0k4hOI6I/JRz+9M6/305EI4noeiJaTESH9fWQ27YN36LEQwGvF8I3FAIhOGIEhJ0ROCS9vByCiBXCTBAI4PtMZughGJQGSrbfWygEQzQQwEbF3nctWFFWVWxOW7ZgE5w2zdjYjkRgRHDNQx6jUaOgZD77LDYermvU2Rn/fa7NaLXCW8eFw9NVclUV3+f6OUS9r9UXSkqweW/fjnNNmDBw6c0eD/694YZfkdVqo6uuuoHa2lroscf+SCeddBqdccYievnlv9E551xJW7f66bnnbtmTclgWsbHGBnuy6EFtFKFRc5JkKcrJwJ0OmaxKhSQfLkg14jCV80SjydO1tWnNqVyjpwce4IoKKLFGcpVl6qef4tpHHGHsXCGSnZzDYaLf/hapc7/5DSLxOLJae89MMpSU9L6HL75AKs6ee6LLa38dNm++iQ6DI0aADJkyRd5TJIIx5u63/cEHHxDdey/G6/e/RxQApzwxIaCt4ZkufD78+8tfQhZdcw1k0SOP/JFOPvk0WrhwEb344t/o3HOvpI8/9tOyZbkti1avjm+WZNScRFt/UO//JlNyMn4gwE0/fD45z3muJ9PP7FyUAAAgAElEQVRlsoVoVOoQ3FGZCfuiIuzj3DjFCO3t6OzJDc/mzMFeG43CuDQCR8X01f00EkEmhaKA5Nq+Xf+4jg6sV0XBGp04EfqI0bF+PwjEHTsQVbR+PeRIVVXv+1ZV2WCuqgrfSQd+P+SkzYZzJRtPIhCHixdj3v7wh5BJfE+hEH4KCzNr6KJFdzfWTyQCh3BdXfrPlgysP/7855BFl156A7W3t9CTT/6RFi48jU49dRG98srf6IQTrqSentzXizZtkvYF2wUD7cDOI3OUlspa8X4/ZEti12JGSQlk9fPPQ2aeey5kpd77bW2FXPzkE/x7xRWwY/SObWjAOuEuxs8/D+ftccchglBPp928GbJuyhTIAT5vLIa/LVkCR8UZZ+Cc2ut2dMBZWlGBkgVOZ/KyKU1NRHfcAf3q5z+HjOR74sakVitkM9ckTwesv65fD32orQ0O1kWLZI37qirYoMnKh2QLqgoH8jnnpPW13YKDGggMFxIxSkSHq6oaISISQpiJ6OdEVElEM1VV9e38vIeIHiaiE4joDSJaoqrqK9oTCSEeIqKVRHQDEX1ARKSq6us7X5JVVdXFCcdPJKI/ENHHRHScqqohzd/0zLXtqqou0BzTRUQPCCFm
qKq6PtlDdnfLNNrdsB5AHNrbIXwtFniN+ypO3d4ua+qlGtGWDEVFEGZcj0bP+2KzSSXdKJImE0QisvPghAlQSJ1OzAvtfXR14fp1dRDykydjg9m8GaRionc0FpMbak1N/KbCxq3DgWfX61bY3o6/jxqFa3Z3Y5MJhfB7qsZ3LIZN2OfD9zKpO8TgLpHbtoFE5WiSgYruMJvN9MYbH1PBzoeNRqP06KP3kcvVQ4sXr6O1a4uppoboueduuZFyWBZxkXAhMA8HuzkJR5tEo8Z18oYjEonDbIwREyR91UfUXj/ZdSMR1EGMRFCLx8hJ4nbj58svcezChX3LYa5R+Yc/4HvXXYcuhXp1G/1+fJ4Y+aSqIC3XroW3+rDD+p/K9/DDIPbmzsWza0tDZItADIVQjPxf/wJpetttUPTdbvy9tDS7zQbMZjO9/Xa8LHr44fvI6eyh++9fR48+WkwOBxFRbsuiYLB3oyQ9onA4gQlyrxfr1m7P/vs3AnfyDQYlcVhQgOszcZgKHA6Q+Nyc7uCDoWOkOtaFhbhWKGTcrI4IETeBADIijGSR1wsdKBDAecaMMT6WOwVXVuIYjsLbbz/8GH1HUfCddCNhgkGMldWaWgr0N9+gEUJVFaJ+tFGL2SIQVRX60JYtkK9z52bWICZVmM1meuklKYtisSg98QT0oldfXUfNzcXU3p77epHTifczcmS+FEUuwGTCOuvqkiUWnE59m0MIyKLWVkTr6tVQJILd0tQEom77dhCBepHNRLCRWlowX+rqiP79bzgwv/c9oh/9SH8ONTdDnowb17uE1caNIBDr6qCPJcoIhwPEpt2OkjMmk0xvttt7y7Zvv4VeZLUS/e53sBsZPT2491Qak/aFJUugG5WWoqzLPvvAvg8GMTZGY51ttLTgPjZuTJtE3C04qIHAcCER/8Ivbyf+S3iB/+CXp/mciGgyEZGqqn7+gxDCRkTFRCSI6CMiOjvFa59BGIffal/ezvPrJWo8lvD7Ms09JX2Be+0lPa2RiFSCdidEIvD2OZ0IbZ46NXn4NEe0CYFj9ZpyZIqaGiiXqgoCT0+I1tZCQIdC8NL3l+xQVXiky8vhaWeisqcHxgh3hXY6oRyOHy+VQ06/4uLke+4p71lVQQKWl4No086raBTeaouF6Mwz8d3WVij1HGHJqQBTp8rOqnV1UO47OmT3w+Li5M8Xi8naFxMn9o9A1GLMGGzsHR1QGCZPzi7xxGN85ZWX0ahRUiwee+wh9Oij99Hhh59HmzcX08iRRIcfTnTddbkti7JVcD0TaGvZ2GyDY3hnimxFG6YCbaFqo2ukEpH4t79BmbrmGuw5egiHIWPWr8f7OOss/YYoetf/3e+gyN5wA9F550mvdkEBZJcQ8Y4XrXxXVaQHbd0K8vCwfvpOw2GkVD/3HJ7hkUfiZZ+iQCb1l0BsaMDzbtxIdNllSG3i9FFumpGtlDeWsVdccRnV1sqbPvroQ+jhh++jsWPPozvuKKYJE/6fOM1pWaTtWJkL4PTqsjLsiRy5NJDgjvU+H+ZdSQnmXXGxvvGYDC4Xamlt2gQZfMopIMUzXR9uN2SXng7V0IDzHnqofpMTIsifLVvwTLW10D+MjE4un1FXB13w88+hTx19NCJ/jO6PS9SkS7QFg9A5Jk3S7+6qharCwfDuuyAzr7gi3vHMpK/F0j8CMRgEweF2o072PvsM3B7K7+Hqqy+jyZPlRU444RB64on76NRTz6NgsJjGjEHjiKuuym1ZNHs2HPFOJ8a5pmb3s9FyDRYL5j8HPXDjzET7YNMmlFQ45BBkYvl8vUm8aBQya/16HD9/PmSLHgIByJ6qKthin3yCsiYHHYSMCJMJ96XV07jD+ZgxveXh2rVE778Pu+n006UuxTLH5YLDpLgYNQ1ZhhQV4Vk4MIBTo995h+jPfwZZeeut8YEjXBqkshI2ZqYEos8HHeSDD7CP/+pXeB9bt+LvU6b0v6Z+KohGkaL+/PP4//nnp32K3YKDGggMF/MtMWmCE0GaDD6vIiISQliI6NeEcNNEFSWFSi1ERMTc/JoUj0+8V07iMuh7KcEKH3eUa23F75l4R3MRHg/qZ4VCIDHGjEl+PNfUc7kgAPVq8/QH3GiFa7wZpcqWlsq0pf4W5OeaSWPHSuXEbI5vtmK1yhD1RKW3shIK7ZYtMiJRCGygekpPNApvVE8PvGmjR+O4NWtALM6aBQOFOzMzgcgoKcH9tLXhmIoKY298NCpTtMeMye7mYTJhcy0rQ1Ti2rUYh/7UbNTDBM3OjhQJsKBm83iaNQvGwU6FPadl0VAhGISyw+nLwy3CiGhwiUMtzGZZW8aIkOorIvHTT1HT7MQT9WsU8nc7OpCK3N4OJTlVAvHBBxFlc+mliLIhwnpgxxjffyQCZVYrK2Mxovfewx5w4IGIfOoPHA4oi59+CuX1l7+MHw8mEAsK+kfwvf020nSsVpCUhxwCGcc1zQaqcUaiLFq/HrKopWU8XX890Y9//P+GUl4WDQI4yiUSwbjX1AxsBHUoJIlDJuptNkkepksc+f0gn9avh9zdf3+kw/X3GUpKpPGu3fMdDugoI0YYE4hE0PEcDjxPba0xgRiJQA+xWKALrlhBtGwZopmPPVb/O36/jNJJl0AMhSAnCwr6JhAVBfW/vv4ahOl558XLnEAAelZ/CUROZYxE8P7Gj8/8XOlgQsILNJkgi+z28TRmDHSznc+b07LIZgPpwo0rduyAXZCYJZTH8ILVincUi8HWaW+HLcNr1uGA7jFqFJwNXi/esdUaHxjR0ACicflyrK8FC3QvR9Eoak6bTCgj8PXXaFoycybRL36BucJN3JhIdLtBrpWV9e4G/e23cK5OmCAJRC43xHWlP/wQ5znqqHgZwnX+AwHo1+Ew0csvo7zL3Lm4H+3xXV2yMalR06pUsHEjyMnWVugiixbJDs9C4Nx9BZ1kA9u3Ez3+ON7b5MlwnmcQJLFbcFADgeFCIhplyRt9ztP+QSK6gsDMfkrIE48S0cVEtCibN5jGPSUFE1eseHFNAu72NpyjcvqDHTtk+vLee/et0KkqhIPHAwVuoMKhzWZZK8Pn0xd6QkDwO53YCCoqMjNKOzrw/bq63s9vNsM46OwEQVhZaVy8u6ZGesy2bsXvXi/uS1u7MRJBupLbDbKQozhtNryDNWuI/vtffGfsWChPeigsxN+7u2XK0ciR8UpVNIpIwVAIxw5Uag0bUJs3w1M4cmRmDXaMYN75Yj0ebOyrV+Pz2bPNRhEzOSuLBhNcdDkWG57py0NFHCZC27E5GZGojUhkNDai5uleeyEVxghcR2vLFjRd2X9/42O1eOopnP+ccxCJp70f7qrt92PtcIo8IxpFysuWLTCyjQjOVLFlC6Kqt2/Hff3gB/F/zwaBGAyiQPjrr2OM/vAHkBxeLxT1RAMk22BZ1NyMmkb/+Q8+v/hiM/3kJ7pfycuiAUAwiH0vHMYcr63tfy07Pagq9k+fD+soEsHastlkCZxM5nIoBD2Aa0/OmIH1l63yLBxxzI22ioqko9JuN04FJIJBy1kQI0c
a1zNTVVkfeuRIGPD//jecqNqOx4nPzU7ZdJ2NioL7Mpv7JhDdbjgXGhogd487Lv5+mEDkyPtMoKp45g0boIsedtjApi8ngmVROAzZu20bPp8wwWzkgMppWcRdqLlhk88H/ToT8j6PwUFxsWz45nJhrdXVYc6+8QZk98kny6jkcBjvt7AQ+kp3N4ITPvwQEXSXXGK87jdtgoybORNr8t57EdTAkXhEUpdj5+qmTbinadPi5cPy5XCGTJ6MqHCeX1zixu9HpF80SnTMMcb1+YuKcJ277oKD5ZRTQO5p94yODvyUl8NOy7SG96uvQuZVVCB9eO+9ZWp3UZFMj9br3pwthELQKV97De9ywQLophnaFrsNB5Vt5Lo4PJeI/qaq6tXaD4UQP9I51ogV3rTz39lEtDSL95YUQshupUxO+XxQDDIlqYYjIhF4LBwOkGJTp/a9CXNKrNcLz1FiyHm2wUWUg0Hcm54QMpkgeFmpqKhITyg6nRCyHMmnByFkUXEupmskEEeMwKbCNT723DM+dTgchuHg80HAJ9ZAtFoh6LdsgdE/Y0by5xECZGVREbx827djgy4pwTtuasI1x44d+OZBVivut6kJhoXHg00/G8RUNIoxWbMG75kjM1NIy85ZWTTQ4PRlIsyf4aKEDxfiMPH6rHzFYsZKbOIxPh/qApaWEl15pfH+4fMRffcd6nbNmAGvdip4/nmc/+STkdarN04cgVhSAvnF0YmcWtPYiFpE++yT2jWN8OmniPQxmZDCctBB8X/PBoG4dSuKkG/bhvTlK67A9TweGWU5EESSFtEo0d//TvTYYxjDM84guueeviP4KS+LsgJO+We9oLo6+6SxqsrGKJyqLATmVmUliIxMo7UjEexjK1bA4Jo6FSlnA9GczGrFugsEsOZWr8b8nTvXWN4HAiDInU7oIhMmGMvfri5ZX6uhAet+wgSi007TH59IBPqW2ZxaHcPE77a34zsjRiSXIVyDy+2GjJgzJ/7vfj/GpT8EYiAAoqG7G8+8zz5DYx+0t0M3YsKXKCUZmLOyiCNjS0qg93d3411wWaNdxUbblVBWJvVNbpz50UeQMWeeGW+bVFdjTnd1QdauWoXovdpaoquuMrYnduzAXJg4Ed+/807YqbfeGr8ehMAcCQYR/U0EO007bz7/HIEce+4JZ0iiLFMU3H8wCF0tmezu6kLGxNatIECPOQbf43r4bW04pqICOkQmuq7XC5Jy2TJkktx8M8a8pQVjUlaGoA5+DpNJkqjZrG28YQN00vXrEfF4/vmQi0Ogv+esfMsWhok5lzGilMC+CiH2JKLTdY71ElGFEEIk5Jm/SujI8xshxH8Ti1oa5KRnDVwYVksmclqIUZ2+XIHHA4M1HIaXxsjLrEUsBnLI58Px2U5XNQJ7sXw+Y+PTbMZ7cbnwnlJVxrn7cnFx8pqO3D1wyhQZ/VBebqx8VlfDWO/pgXLDCIVgOAQCEKx6pKXXK+siBgLwck+f3nfEZ3ExNon2djyT3Y7ni0YhzAerYZAQUKjLyrBprl2LEPb+Es719XgPJSWImGKPewrIeVk0ENCmL3OtlqHEcCQOE2EyyTSWZLUPmUiMRlH7xuGAJ9yojAB7wz/6COT4Kaektr+8+SaKZR95JDoy630nFoPsNJmwdoSQNQPffBOK9/HHw6HRHzz3HNG112Ktv/xy77QVJhC5QUe6UFVELdx5J2Td448j9ToalfVzmSQdaNxzD2T7QQcR/fSncNzcc09KX83Lon5AUWTzDpMJOgjP6WyAo7I5co8dAXa7/OnPtWIx6F3Ll+Ma48djDg10cfviYpn94HIhVdqIdI3F4FRobYUs2mMP4/Xq8eB8lZUwgv/1LxjtCxfqE5SxmOwqnNhgri9Eo9BtOOIxmcNr3TrIXYsFjpXElG0mEG22zB2cbW1w+DAha5QtMtDYuhXPWV4OMjqNDqs5L4u4WZHbjbnY0YE9lj/PZRttV0RlJeRQczOilbduhe6SWKrJZJJE4mefEf3zn1ir11xjbNu5XHBiVFfjGr/7HY697Tb9yGBVxfXDYWSIaOXAxx+jNu3MmdCNEmU+E4g+H9ERR8C2MXIsb91KdPvtkDm33gpZwenN0SjmrtOJc6Rih+uhvh7n7uwEyXr2zop/jY1YFzU1vRsSCQEZytk1HJWYKbxelJf58EM86zHHwLk6WI1bdJDz8q2/yHUS8XUiulgI4SN0w5lERFcSUT0RJfgEaTkRHU9EDwohviSimKqqL6iq2iCEuJmI7iKir4QQLxJRNxHN2PljUG0luygowELgaDenEwuzomLgai4NJJqbIVysVhBZqUSnRaP4TjAIgT8QHvNk4BRzro+oN+YWC96Hx4OfvlJKwmGMBXccNHqPHo9MSbbboaA4ndJwTfT4RiJQZsaPR0RgaysM58pKGXkwZ44+CevzgRyzWkFYqioU4vp6eMT6qj1ZWIhnaW3Fd8xm1Aocio7jlZVI1eau1XV16RcKVhQ8CxE2yAMOwIZfVQVCO0XsMrIoGxhO6cu5QBwmIjGt2eh+hUC67apVqFGYrLZhYyOK/peXwwBPhQhbupToN78BkXb33foKoKpCphCBNOB7VRSkm3R0QElm50hhYfqKZCyGlN5770UB/7//PT46WFVxPVXNnED0+0EevvUWIrbuvBN7sqJANnMpkoGMpA0GUVydCKTwr3+NsSsuBhGbIvKyKAMwUczvmhtxZIMk4LQ0Jg7ZmCouxo/Nlh2ZtGULuqY7nYiiO/bYzI3GdMF1v5qbIYeS6RE7dkAHqa6GXDCqN8d1CYuKMG7//Cf25bPO0pdfqgonYDSKtZvOWo3FQCjEYhi7ZDXwli1DJMzo0SAdtM5L1FKWEcuZOBxiMdncobwc8migMzwSoaq0s/s7nmfqVJC3aWKXkEUmE/aboiIpI4JB6VBKLN+Rx9BBCMiVhgZkLXAtcz1YLHiXL7+Mf2+8sTfZyAiFEAFXVIT3fsst+P5vf2sc7LJtG847dap0spjN0KtWroSj5eije8v+aBQkY08PSheMGCEdy4lE4ldfQS8qLYWTkestFhXhWlu24Dxjx2a2F6gqxuexxyBTH3kExKeiYIxDIdiDRgEcWiIx0/RmVcV4vfkmCNMxY7AHHHrokPeT2CXkW3+Q6yTidUQUIHS3uZiIviOiHxPRXtT7Bd5HRFOI6IdEdC2BPX6BiEhV1buFEFuJ6HpCkcwIEW0moqcG/hHiUVgI5YvruXDHKfaGD3coClJse3ogcKZMSU2R4xp/XFNvMDo6JYIL1LKSYEQQckdlvx/PZpTSwXUCieBBNjKcQyEoa0VF0jDmdHeOelRVSdKxspvoLd+4EceWlmLT1CNhg0FsbAUFUPT5nmbNgtK6YQPO31cX7HAYBlF1Ncajq0sa2YMNTm/evh2ee68X8y6V9KH2dijqLS34fdIk1D/LIPVol5NFmUJRMKeJhi59OReJw0SYTFLxMpIdq1cjMueQQ0CuGaGrCxF2QqB+YCry9Ysv0Kxk1iyiBx4wVtb8ftxjcbFUbgMB1M1xOIhOPRWyJhKRc8
NsTp3sCwSQov3qq0QXXkh0//29Oz73l0DcuBHP2tiIa112Gc7DNerM5uwRSkb48kvUXfz2W/x+/fWIFs1ASc7LojQQi8koIyK857Ky/qcrcmaD3y8zBbgRDxOH2UJzM9ZrRwd0xRNOyKi4fL/g8cTXKQ6F9J1HLhciJW02OC2T6U9tbTIN7sUXoQOfc47x2PX04LpVVek5rlinikSgfxutuViM6JVXUJ909mzICe29ZINA9PsRRepw4B3Onj34qbNeL2RiZyd+nzYtIwKRaBeTRVYr0l25hrrbLSNNCwvxb67pGbsieA2NHAkZw+W0EuF2o6ZeVxf0IiMnbCwmbaO6OkT9RaP4d8QI/e/s2IHzcrd51lPeew/BFwccoK+zxWIgPzs6iL73PVm+JLHUjRDQ6Z5+GvbOr38dT2aqKmSaomAcSksxZ9OJdHe7oZN8+ikIu1/9SjZxaWjAfUycmBo3wXI83fRmhwM67urVsF8PPhiRmdOnp/YMA4xdSr5lAjHMIyV3NaQ92IGAVIwKC6EcDUXEVypwu6EcRiJQflJVOiIRCKRwGFFkQ02WBoNS2Car+eJ2471wjRQtVFVGVU6YkFxRbmmBUB89urdQVVUo3aEQxsVux+YSDGLzYgXW64WgdbthQLA3SotQCNF6QmDTSVRwYzFsbg4HyDSj2lvBIMhR7sBlNkPZDwYxFrW1Q6dIOZ3wVKkqxsCo/mQwCCW5owMKR10dnnfMmJSJglxXFbMu+LkxwFCkL+8KpKEeVBUywmTqPS87OpBGU1OD2jSFhfrpz8Eg0eLFIBrOOw9e8b6wahWKco8fT/Tkk8akIzcN0BrMPh8ihtxukGCJaX5MJrJHOhnx19EB0uCbb6Cw/+Qn8c/XXwKRi4Tfcw+U47vukk1f/H4ZOZnNdNZEuFzoev3WWzKyae7ctMqZ5PpMHxIlVFVBBrjdkgQvL++f00NRZMQhO1IKC3Fuuz37EdkdHSAPm5sxRw88sHfh/sGAooAEV1XcQyiEdZ5IxobDuF+vF3M8WbRiSwvki90O4s5kQu0rowwVTnsuK0vPCc3GdiiE+zHS1UIhyMJVq1Cf7Kyz4tcnR2RHo7jnTLr5trZC1qkqHMEp1D/NKmIx6OKcPTNlStqpgruNLAqHITsURdYx5f3MYtk19I9cRDRK9NJLCMA54wwQeYqCQAPt2o5EiJ55BqTeggWICGTCLXEP2LIFNs6YMUT33Qcb6Xe/MyYdu7tha9XUyGNiMezx9fUgwg4/vPf3VBV1EhsbQTJOmaJ/jKIQ/eUvuPfvfQ/lTrR7CzcmdbvxPDU10GW4Zm1xcd+OibVrkYXicCB9+cwzZbT59u0Yo4kTM9vTIhHco8lkfB+xGOpF/uc/uIfaWjhUDj/c2K5LQH4FDgLyJOLgIuPB9vlAJnKR5qqq7Hqy+4vt2yH4iorgIUi1ADmHREcig9cSPhV4vZIgNFIGVRWkVTSKqEHtxrNjB4TtmDHGCq1WeR01KrnX2uXCJhAM4ns1NZJs9XiQwqyqMISjURgS2nQ/RcGmFoslb0KiqiCCu7qwQSTW4AkE8K5NJrwvvmdOfenpwWcjRw5dmHk4jGf1erHxcKcwvk9OpfL7ZbfNsWPTrr+Z6xtUVgX/UKQv76rEYSK0KSw8j8NhKLEOB4jE2tr4js08DrEYPO0rV6IpSmITEj1s3IjC3FVVRH/9q7HCFgpBHmmbBng8IBB9PjQ9MEoNUlXpkVZVyE4mQRn19VBcu7pgvJ9ySu9z9IdA9Pkwhu++C6X+jjvwzEwGDHQHZlVFzab774d8X7gQhCl3oE8DuT7rB1UJ5ffrcmGv5AyATEgfIhmV7/fj/0SYN3Y73mOm500GpxOk3ZYtWHv774+I4aFo9qCqiJ7t7obhW14OueNyyYZ02uMaG1HiJlmkpMOBH6sVKWyRCBwgRrLI78fxdnt6dZFVFdF2gQB0KqN119ODxlLNzVijRx7Z+zz9IRBjMZSU2bwZc3HevMHXhXt6ZNfZUaPwfjIg1HcrWcTv3evF7xaL3Mc4zTmPwcXSpSD6Tz4ZDlOuzV9UBCKR9YQlS4ieegpZHNddJzvAc0d21kU6OrAuqqqgh2zfDnJt5kz963s80F1KS2ELCwG58OabWN+HHgp5rUegLV+OY/bdFyWV9OD3w9m5ciVI0osuitebuK8ANybVysxIBPOVS2Tp6emxGNELL6De64gRSNfme+nslLXwJ0zon8ON63lzAxrtM7S0QG9lW3OPPRBRetBBaa2pXJdFOYE8iTi46NdgqyoEQ0+PVH6rqoZ2o1IUhHk7nTBkp0xJXZENh2VIdLJovaEARwDGYlDqjAzUWAzvgwgklMkEQdvVhfFI5sXt6cE1tIRgMjQ3w1M9YgQIPCJ8f+VKCPP99sOmUF8PpXjPPWW3ss2b8e/kyX2Ps6rKKL1x42RUo98vPVDjx+sryn6/rCtUVzc06c1EeAYeL7sd85I7hXOdydJSGDjjx2dEyOf6BpU1wa8oMDy03viBwu5CHCYisSj1X/4Cj/VPfxrfrCQWw78sr5Ytk97qk0/u+zqNjUQXX4w95ZlnjMsacMRVYaGMjHc6QSCGw0Snn55aJDoTgZEIfmcycelSogsuwLlfeql311P+HlFv8jEVfPcd0S9+AYfC1VfjmTlVyOvF/djtA+eoa21FjcnPPoOcvuYaRH9nSGbl+goYNCXU78c8jUSwV1ZUZObwCAZlxCHPXZtNRhwOVAkHnw+GZn09rrHvviDkhlIH3LIF0f977RXvNAiHsZZsNozJtm0gESdNgswyWrM+H9aH2Yz6oD4f0aJFxmmD4TB0LnYIpiMLOjvxHqurjXWwpiYQiIEAorNnzYr/O+vlHM2a7rv3+1HXrKcH+tmsWYNbX09R8P7a27F/T5vWr3rku6UsikSgV2qzMNihx6nOeQw81q+HvjN3LmoJMjo6IKdGjID8WbUKabrjxyPDgff5YBAywW6HTPD5kEZrtaIu4KZNSOk1qrEYDMIZUFAAkrGgAHPiX/+CrXv00dBlmEDTEonffgu5PnOmcQO6jg6Qei0tKLvC9RRZXnDDKp8PASx6gRFMfCsK9g1terPTCWfqF18gZfjGGyHTOPiipweyYezY7MgodiYTyTrgH3yACERVhTyvrcX7nDYt7dPnuizKCeRJxMFFVgZbVWW3pVgMyk9/PHbaBgEAACAASURBVOmZwukEKROJQDD3VUdPi1AIQpXTTodTVCWDC61zV2Yj5TQSwViwN6W1FYI2WRFbnw+bVWlpaqHZfj82EK6NWFSE+/v2W2wE2jp+kQg203AYnjiOdpw0KT3v9qZNMoR/xAgQiBYLNt5kinIkgmsGAhi3dDskZhMuF56DmxJwcWybTXYqy/Decn2D6rcsGqz05d2VOEwEE4nLlqGxyIIFiPZLBBOJGzfiuClTUEuwr3ne1gavdiiECES9kgh8H14v3jkb3g4HCMRoFN7xvpozJUJLJj77LNFNNyFq4KWXekcz9odA5CLh994LBfvuuyVBGY0iioA7MA/EfhqL4ZkeewzXuegil
J8oKoKszMuigUEwiD06HMZ75X0gVahqPHHIERRa4nAgnSehELINVq/GvcyahT1/qB2vXV1wYo4erR+Zw2nd4TAiJysrEfljNFaKAj0jGiX65BPIlXPOMY5o5gZzJhNkTjrrp7sbcqyy0jhb5NtvEX1UUgKiP/E+uCs962XpEogtLXivRCAmBqsJDoPJlUgEDuN0m9LpYLeWRX6/rK3K9aCZLLJah6Y+9O6Cjg7UTR01CjpI4jzeuhU6Tnk59v+CAqkHaOF2w24oKZGdld99l2jNGqKf/QzySw+RCAjESASy0GbDd199FQEN8+cjHZfB9a5NJjg1V6+GvcblVBKxYQPR73+P8990E4hGbrbCaGyE3ZVKY1JObzaZ8Kxr1yKrxeUiuvZaOII5ipIjG+vqjJ05/UEkAjvtrbewT9fVQSZXVYEMTie6XINcl0U5gbxIy0FoOwe6XPjx+fB7RcXAp7RwvYWmJmyUM2emR04FgyAQhUCY8lB2bk0GNpK5iLKRp7qgAAKvtRUbWV1d8igcRYHyzWnpfYE97VYriFqfD+O/aRO8NByBqL2f6dOxKXz4IRTTvfZKPz1m6lSMwaZNICSmTUPEaF/zq6AA1+zpgREQDA5dejMTXU4nfjeboTSMG5d2+nIeGmjTl63W7L/bPHHYGyYTFMnFixF9dOqpxse1tqJ7aE0Nonj6Mgy7u4kuvxyK4lNPGROIbDSbTFKedHQg9cRkQnHyFOvVxIFTWm69Fd3/jjmG6Ikneq/RWEzWoEqXQPR4EHXw/vtIKfr972XJB20H5mw01dDD5s3w8q9bh7pxP/4x5Hdp6fCtc5zrYNkfCmFfqq5OfR9UVcg4TlXmYvZ2u/wZaOeYosB4XbECesCeeyLVdagi/LUIBHBvpaXGqXd2O8bvk0/w/3nzjNcWpxMqCiJ0udmBEYEYi0FuEaXvqOzpwXovL9cnEFUVtbheeQU6zzXX9D5OSyCmUmMs8btr1oCkqKzEuAymDAgGIY8cDry/adOGTymhXAbXPXW74yP1WV8ym/H3oSg7sCsjGAQBZbMRnXiivizYYw+se26Kctdd+jZAWRlk7YoVkPeffQaC7+qrjQlEVYWdFApBFtpsuKd//hMy7eSTezcC4TmwYYOM0N5/f/3zf/IJ6iZXV0NHYpnIUYic1cfOgFRqwtps2BM9Huhazz2H895zj6ybzecNhzMq+ZQS/H6id97BeLMs4hTmAw/MR/EOd+RJxByGySS9qE4nhIHHA8UojaLsaSEchtBzuUCWaTv8poJAAN4SkwmG6nCvGWKxgCgNBCBwk0VMcppsRYWxcRuLxXvO+zKCOarPbJbHBwJQPrnDst4YFhbivkMhHJ+pIK6pwbWYDEx1Tgkh63a2t4P0rK0dvK7bkQg87K2tUNoOOADRiJxyMhwjX3MF2vTlbEbg5InD5PB4EMFWVUV06aXGY+P3g2g0m5ES3JeTxu1GakxHB9Hjj4Oo0AN3HiWCwSkEFOTXXoMMWrgwvg5rOvD5iH70I9QpuvJKkG3RKPYbRZE1DzMlENetI7rhBtzvT39K9MMfSlnGEWYD1YE5HAYx+7e/Qf7dfDOU44IC7NN5JTn7UBToRGy4V1am1hwnFsNc4B8u/s71DQerWRQ3OVu+HPcxcSLmTCYE/UAgGkVKoBBwaCRbM+vXQw+ZNy/5vstNzr78Ev8//XTjuomqCgJRUaBXpBPh5XJB5rHTXe/Znn+e6OOP4aC95BL9JnRcBy9dAtHnQ/qy04ko8ZkzBy9TQ1UR/djQgN8nT4bDN7/PZg8sb4JBGdVWXIx9OBzGHC8owJzKk4n9h6qChPJ4iM4+OzkZv2QJ7JHzz09ek9Xrxfm++AJRgpdcAsemEbZtw7uePBlyxe+HA6K7G9kieg1SiGAXcST3vHm91yFnTixeDHLy5pt721DRKGzqUAgOj3RsLLcbdaG//BINS372M1mCy+fDeYkwVtl2MqgqyNm338Za2Xdf6STef3/ooUOVwZZH6sinMw8uBnSwFQWeFo4UqahInoabLpxOEIjRKIRlumHNPh+iF7mrUy4ZT9yFTa97YzQKpSwahQCORmGw6KUasaI8cmTfRFYsBqM3EpGRfO3tiDAsK4PHhhsblJfL98ydobneIhcLnjEjPdLW5YLCyZtyUxPOx8WCU0U0insIBLDB1tYO7ObQ2QmvoKLIAsmhkIxC2bYN9zRhAu4lQ+S62p22LOKUvkhEEur9lS154jA1RKNIvdm6FYrkmDG9i1HzcU8/jfV/0UXwbut1bGb4/URXXAHC4qGH0GDECFz/jet+NTej1o/dDgIxUwfBjh1Q/teuhRf88svjn4fTnKNRyK905p2qwsP+wAOQXffcE19viDswWyySGM0mVqwguvNOyM4TToAxYrNBZvcjfTkRub5isqYXcW0y1oHKyrDnJHuv0ahMU+bGZdzBkutiDmaX+c2bQTK5XMhoOOig1OqLDibWroWDbs6c5HWfV6/G88yYASeoUZ1Rlwu6zRdfQOc48UTjumBEUs+trEzPuPV44BAtLta/70AADQXWryc6/niUjNCTsT4f/p8ugdjcDNJACKQtplMCqL/w+ZBR4vHAETVlyoA4U/OySINYDOPNzgxOMQ2HZUMxqzVPlvQHn38OuXH00cllxj/+gcjA446D46OqSr+TfU8PnI7LliEC8PTTkTVgtAe0tIAMHDMG0Xo+H0qWuFz4rlFWR3Mz0aefQi4eeqjMxuC5oCjIyli6FPUJr722t82sKLBnuDEp25wmU9971ooVqK/o9aKxzDHHyEARRYHNZrEMTLCP0wndcdMmjNv06RjH0lKkL5eW9t29OQXkuizKCeQjEXchFBZCIIXDsrud2w0yMRUvvBFUFUbQ9u1QAmfPTj/1wuvF9wsLIZRyrTZIaamM9tRGeXIDD0WBELfbsXlwzTCt8HW5YKyk0lmbuwaGwyDCLBYI2fp6vM9995Xn5/qYHAHZ3IxrjRkDRbmiAt/77jt4s1Ihb3t6sIkUF8siugUFIDHWr8d5UlV8zGbcC89JTm/Odhp7KIRNqasL833CBIyLquLdcNTBrFmIUmTv4cSJeY9wX8hm+nKeOEwfr7yC9Xv55ZjX0Sh+EonEN97AvD71VBiJ3LGZqPcYh8OIylu7FgRlMgIxEICiynWeGhvRbbCsDARipl7qVauIzjoL6/Dll4mOPTb+7/x8XIg8FsM6t1j6lj8uF7oofvQRlPDf/ja+U6zXK6OSs51K6PEQ/elPRK+/jiiDBx6AZz0azacvDwS4GzBHh5WVJSdpI5F44pBIliXhqKHBRlMTjOGuLji8TjoJa324Yft2EIiTJycnEBsbsc9OnAgj0euVKZ7a/ZabGXz5JXScY45JTgZweZnS0vTkjtcrOzjr3XdXF9ZseztqyOqlLjKBKASunaoOFI0ifXnbNuh/BxwweDKAu7VyU7zp09OvWZtHZuDu5EVFkE8OB/5fUgIZpCiYT4WFkDl5PSg9bNsGmZmsGQkRoopfew3E/WWXQX41NUHejBkjjwsGQbR/9hmiwE85BQ4N
p1M/ldfhwLqqroad5HaDQPT5oBeNG6d/P21taB5SVYUIwIIC6SglwvfvvBNk5qJFcLLq6W/btmF9T5wo5Ym21qLefIrF0DTvmWdgFz3wAJzNRJiH27Yh2KWqKv1Mw74Qi0HOv/8+fj/mGOhzO3bgHubNk9wA63zs1MuvjeGJfCTi4GJQBzsYhJALhSAc0vXaEuG7GzZAOI4YAaGSrtfM44GgtVr73xZ+KBGJYBzY2CDCJuRywVDUGqhOJwRgRQWODwYlKZdK9BsTwNXVUJa3b8d7qK7GZpmohLtceMd+P0LoR46MjxT1eEBCFBWBAEy2MXR3YxMpLcUGqxXebW0g6ioqEF2Q7gYTCEBJ56jNfnQB/H+oKpSCrVvlhmo24zmKijDnEkkv/k5zM0iEKVPSVupzfUtLWRb1t/tynjTsH5Yvh0f66KORhsuIRKT3mghe83ffRdrjKafERyZzp0j+LBpFd+KlS5HOcsopxtcPhWTEs80GYmDJEiiZZ5yReXOHJUvQFbm6Gop3YtdTIpneYjJBvrHhxYolpzknYvVqpC93dYEoXbRIPjtHh0SjA0MYffghIh57eojOOw/XjkRkJMoARODn+mrKWC/id+nxyCZz5eX6MooNdr8fc5pIdqcsLh660iocgbdjB/SKefNQk2o4ykiXC/KouhqOTKN77O5GhFBpKci4ggKsWZdL1h3lov3bt8OgbmpCFMqhhxpfPxCQ+3o6qd1+P4hKm02/jMzWrZCx0SjKKeiVdGDiOV0C0euVkaXTpqXngO0vXC6QIoGA7Ew7wBlAw3DWpoUBs9HYcaWNkuYU53AYx1gs+BmOa3+4welElkFZGZovGdmVGzbAgThyJOohlpTgXWzciHNMmwbbOBaD3vDBB4hCPPZY1EJ1ubC/VFfH2wheL4IziotBzLvdaOwSDhOdeaZx9HhXF3SEkhLodNp9JxKBTXLnnTjuf/6H6Pvf732OUAhkn6oi1TgxKIXJyEQisbsbY7BiBSKtr79e6m8cENPTg3lZW5vdDI22NjhVm5sx5vPmwYGtKPj/5Mm9v6Oq8URimnIzv4oGAXkScXAxJIPt94OUUhQIh8rK1Ay/nh4IYFUFyZJJ6qfLBeXYZkutKcdwRyiEzaOoSCqmtbW9PduxGMaPCIp0ezsE4KhRfQtCtxvvi7tTNTbKJiqzZ+t/PxTCptjTA0VRrxg5d9MuKcGmp3eeri48U1mZca2cjg7Mi7IyeADTJYWjUYyH3497SberohZ+v6zRWVkJArGzE59XV/dd78ftlt0JJ0xIy0Of6xtUn7KoP+nLeeIwO2hpgQI8bhzRjTfGrzVWsITAun7hBSiUixbp1/AiwjqLxRCh9+abINoWLTK+vqLEF4jfsAFE5YgRSPXLhIBTVRjsN9+MdMgXX9QvjRGNYu4xgZh4X5GITAnjGomxGDpSP/QQjIZ77onvGhuJQH4PRAfmzk5cb9kykBA33QT5Ewr1LjmRZeT6ysqotILXC7kfi2Fu6hG0oZCMOOSO3larTFUeypIqPT0gD7dtgz4xd+7g1sdLF+Ew7tdkSl7w3utFtImigEDUNoFRFBjkVivGv6UFzo/NmxEJffTRya/f2Ynr1tamvpYCAXzPYpHlTbRYvhzd6CsrkTKol2KcKYHINc/MZrzfgehsqodIBPOqtRX79tSpg9ZIbreTRemCSy6wPcaEOtf/FQJzNd26v7sTIhHoOx4P9BejYIS2NjQiUVXoG9rUYkVBpF8sBnuouRm1Fd9/X9YHNJt7Z4UVFmJfWbcOcmDmTBmBGIuhGZSRHeF0gqS0WhGFl0j+rV2Lhm8mE9Gvf41AjUQEg1jbfTUmTSQSly+HwzgQAHl4wgnx49nYKEtt1dbKvZOb6GUa/BOJgDT95BPscyeeiHFatw7BKIcd1ncwiTa6Mg0OIb96BgF5EnFwMaSD7fVCceW0tMpKfQHENfWam6WXJZNoE6cTBKLdDoJmuCrH6cLnw6bickHYjh6tf1wkgvHu6sIYjB7dt9HCkXp2OzairVvxM2IEInWMlIrOTtRlLChAiHplpf54d3dDYa+o6F0PpKMDfy8vN34mRlcXIhuLi3FfmRhjPT24XmFh+unNnKLT1ITnnDIFc7S5GX8fOzb1KEdFwRi7XCBt99gjpY0q1zeopLIoGoWykk76cp44zC4CARCIfj/+1TMCYzEYqosXwxg5/3xjYzEWwzu6914o4FddFV9/MBGcusdK5Pr1ULDHjEGx8EwitxQFEZBPP41zPPGE/t6SjEBkqKqMTCTC/nb77agzdMwxIEq1BEY4LFMRS0uz59CKxeBh/9OfcC8//jEiEbRk5QCnLub6KktLL/L5IKsjERhhFRXxczEYlBGHkQg+KyqSEYdD7cjkyLQNG7Bfz5mDGl3DuUa0qhJ98w3GPVl36FAInUY7O9GURE+PCATwEwwiWnHtWqT3nnii8fWjUegnQkDnSvUdhkLQpwoLoUNpdSJVRTT0v/4F/eGqq7BWE8GOFK6Tmcqexo1nGhvhzDzggMwjttNFVxd0PO6oOnHioOreu5Us6g98Pll+gUtccLkOzjKwWoe3XBgqvPsuogAXLDCuOej1gjTbsQMRhQcd1PsYlwsBGn4/1uuSJTjuV7+KH3cOfBACASP19VhfM2eCyHz5Zayxs84yjpD2eNDx3WSCfpKYFfjBB0QPP4xAk1/9CvKqoCBe3gQCsPO4e3FfOlgshrn0zDPQESdOhI6kHbNQCOdUFDirtXZTJIJ5GotBfqVbQ3XbNuhG3d3Y577/fURBdnQg8vCAA1InJzm9mbNvUpDDuS6LcgI5mliaRyYoKYHg4hp6LS34vbJSCsxQCOSQxwNiZ9KkzBQQhwNe0JISCKZdhUAkgvBiz3ayyDWuc+H3x4+xEdjTbrFAUd60CUro6NFIgTESmg4H3mVdHd4Z116prOytbFdXQxBv24YIvMmTZZfVnh58J5Vi3zU18JLV16PWz6xZ6ZMKHBHb1gbyL9X0ZrcbBpjPJzuEd3VhrOx2kKjp3EthISKHOL3Z54NRke1uZLmCcBhygLuSJjPY8sThwEBViZ58EsrWDTcYE4NuN9Grr2LMTzklebSJyQSi6/nnUfPrssuMj+UutRx5s3o1vMkTJuA6mXil3W50i166FJ7wW2/V3xdSIRCJZJfmggJ42W+6CTLshhuQ3qQ9N3dgLijAnpSt/aihAalH334Lhfimm/AO3G7ZpTNXy3cMNwQC0FsURe69NhvWSiAgiUM2NNhRarcPD/0jGAQRt2YN7m/vvdGFcgCaW2QdmzZhbc2aZUwgRiI4rrMT+6dROl9REc61bBmiUebOjY+KSUQshv1dVaEjpEoghsOQn2Zz72yHSAQRy59/DtLgggv012kmBKLHg0hMjwd6RTLdLZsIhUAedndDxs2apU+K5jE8UFyMte924ycQkPUTo1FZRiQcBpmY30eAVatgdxx8sDGByE1JmpqIzj0XMkYP5eX4eeMN6Dff+x7RL3/ZW+8wm2E7tbcTff01fp8+HfvRK69gPzrrLGP9y++H3qOqREc
eGW9bqCoIvpdfhjPpxhuxZ7EexEQid0suKACBmAq53N1NdNttGLOTT0YDFe1+4/VijISArZ/o7OTSXX6/rIudihwMBIjeew9jVVmJJn9FRRjjaBQR6sm6Y+uBIyp5XDJIb85jAJDVVyCE+EgI8VGKxx4hhFCFEJdm8x4GCkKIZ3beb06LciEgNMeNgwff7wdxwmmsK1fis+nToQhmski7ukDIlJaC0NmVFrqiYLwqKkDucbSJHrxeKAKjRkHgsddRD1pPe10d0hMbG+FJTqaEOp2IROKx5nR1TqfmkHYt6upwbHc3jODWVhxbXZ1et8CqKnjiAgGQDFxnKh3YbJiLdjvmX1ubTL1MBBspK1bg/7NnY442NeFZampAKFosREcccQQdccQRKd3DRx99REIIevvtJ2n6dLzP9euhMAxHDJQsYmM8FILyYEQgco097bznent5AjE7ePddEA5nnQVZrIdgEAqsz4conr46uD77LIjJBQugTBq9K1XFHqCqmAMrVkD5mzQJDVsyMWaamuB9//hjKPe33dY/ApERiyGq8YoroKQ+8ww6InI0B5EklywWyMls7EeKgrE87zw4Y269FSnUxcWQ8zYb5GlBQWay6Mknn+z/TQ4CBkMv4miyzk5JJI0YgbnS2Ym51daG98w178aPxzHZJIwzhaLAmPr737FPTpuGeXPIIblBILa3QxcZN85YxqgqdInmZuhF3BVeD+EwiNSvvsJ+ra3fqgcuxVNVlXpUlqJAnzKZMA+0+5jPh2YCn3+OaOiLL9aXaeGwdDykSiA2NkJWhsN4vzNmDM6e2NqKOcblbObMMSYQ8/Jo+IAdTeXlkGddXdg/2IHL0auBQHxk9e6K1lY0SttjD5RU0IOqSlk7fz6aqhnpLIoCPWvpUthal19uHIBgtYLs7ezEHuRyIYW5qAhEpRGBGAzi/IoCApHr6RNBTtxzDwjE446DXsSyhmVWNAqHRGMj5F+qdU0//xyybeNGpEb/4hfx2V4OB2R2YWHymvDsSLbb8Qxut/E8VFU4hv73fzGuhx5KdPXV+M6HH+IcJ56YPoGovZeCAqwP1hUHA3luyxgDKmiFECYiupWIvlVV9fWBvFYe6cFkgtArK4PiwaRJXV3/Osd1dkJ5Ky/v3ZQj18FFwIkQkSMEhKPX29s7Hw6D2LLZoMT6fFAEuLacFqoqm42MHAmh39KCa0ydanw/Hg8MqOJieOR4rLmJjtMpIxITN9FRoyCAv/0W73qffZJ3WjRCRQXIvLVrsWnPnp2+YWQ2436cTozZ9u0YM+15ursxLqEQ5tWkSXj+jRvx3BMnxm/OiYjFYnT77bfTvvvuS6effrrhcaWl8OBv3YpN2+3GtYY6BW6g0Vf6cj7icPBQXw/ldN48KMF6iEaRhtfaijpi06ZhjnLH5kS8+ioM5/nziW65BZ9xs5VEcDRXcTEi/L74Auc//vjMCJnlyxEZGA4jteXww/WP4+6EZnNqRKXDgTpHn3+Oe7vlFtwzN2MJhXAMEdZ1ttIJ16whuuMOyIjjjkNUZUkJrqWqkEPJrpWqLNrdoSjYEwIBzImKCsxXjwc6hqrKVHs2uIeTTOK6T19/jWeYNAmG7yDVpssKfD48Q3k5ZIARtm+XztVJk4wNce7O+eGH2GdPPBFywWi9OJ3YlyorU9crIhHpAKyri5cl7e2IxnY4iC69FDJWD+Gw1Nfs9r7nVSSCaJ+mJmSRzJ07OASx3w/HqsuFsZ86NX05l5dHQ4+iIuhdHg/siWAQ+4jFgjmoKHJOms04dlfXSRPh9xO99Rb28uOPN16Tb7+N+ntz5xIddZSxXaCqSC/+618h2664QvYPqK7uff7WVsjDiRNld+WaGtRANCLsw2GQnn4/CESt7Hc6Uf9w0yaQfaefHn9NJswcDllabOLEvnWjSARlYp5/Hk6a22+HUy0Wk3UF2SmXTqAPR8N6vZinienNbjfqbNfXw5674ALsG8uW4VpTp+KdZGPecjqzNipxMPf+PLclMdDeGhMR/YaIniWi3Xqghys4so672lZVYcFzmHc6C7O9HZ40jtIbTgp9f6GqqK0RDseny9rt2CCCQSlQYzGZRsMFwEtKpEfJZIpXsrloL0cgtrdDEZ80yfh+fD6kJNts8OokbgJMJPb0yDTlxKYMnPLFqROZoqwMqVlr1kCRnj07MxK6ogLP09aGseZuaJs3YzyLixH5WFKCDb27O/X05VgsRr/97W/pwgsv7FNRLiiAUtHWBuNo7dpdO73ZKH05TxwOPhwOokcfhRJ2ySXG4/3++4iAO/BARCrb7bLRCiuKjPfeQ22gQw8F+cXFwvn9aq/BjXSKikDOffMNommOPTazd//Pf0I5Hz0aUZNGTpF0CcSvv0baj8eDKMAFC+T9cRSjzyefhRXO/iiwfj/ezcsvQ1Y/8ACijXw+yNiCAuyZfd1/OrJodwQ3HvD54rsyMklbUADjhztrDzeZpKowDDmldcwYpMwOVlONbIGJMbMZTkYjQ7O9HfuxyYRonooK43OuXAkjf9Ik1G+NRECMcFkCLbxe6aBNde/l2mWqKpsgMDZsIHrsMTzPz36m3w2USKaRcjOpvsAdq71eyOI99xz4Oamqsia02YxrZjq/8vJoeMBkwv7BKc4OB+ZfaSnmYmGh7OTMEbJW69BHWg8GYjHIjWAQDkkjgv7LL5GaPGECMh/GjTM+55dfIntgxAhEA5pMMiuLI0QZPT1Ya1VV+P211/CuFi40JhAjERBoLhccp9rGpI2N0MmcTugxBx+sfw63G/dkteJZ+tIt2tsRzbh2LUjJa66R0Ycmk2y45PWCAB01Kj1ZZTb3Tm+226GPvfce5O/8+dCLWlrwzlQVzVMmTEj9OqlgiNOb89zWTuREyHceA4Pubii7RCB+qquxQTHx5HZDkJaU9C1oWlux6VVV9Z1Wl4vgdKnRo+MVy6IiWXzWbMZG39kpowq1RmtZGTYNtxuKNnuZ/H78zvWEpk5NLnADAUTBWCzJo+QKCuKJxIoK3F8sJmv/7b+/9HSZzemlM2tRUiKJRI5IzIR0s9lACra3IzqWo1onTcImqiggFYNBbMojRw6cwj5yJJ5ryxbcy7hxmY/PcIRe92X+nDHcDPRdGZEICmsrCrqEJlOUV6+G4ThrljTahZDdlzm1/JNPUKR7v/2I7rtPGtX8XrVEIpPJFgu87KtWgTw44oj05wE3cPn976EgP/ecccHxdAjEWAxe9ieegJx49NHeEVKRCMgbLg2hfTZ24KSrbH76KdFdd0E+n3UW0ZVX4v309ODcNpvssplHZohGZb1mrkPH+xUR9gG7PbOO4IOFhgasTy6v8f3vY57mItatw3vYf3/jMefa2sEgdKNkut9338GpMGoUCESOeFcUGLVap3UwiHMXFaXeII2dt9EoiAGtY/Gzz5DiWFcHw1pr0GuRLoHY0ABZXFgIJ00m2Rzpwu2G5LubogAAIABJREFUs9nvlzWh8803dh1YrZhHXi90dI5KtNlk12aOTPT58PtwdKZkE59+Chvl+OON1+6mTUR/+xv0oe
lcvHtkX+7d/IBvzEJ4i0t7WxT8aNIyL6xz9idF17bep98eKLlASFQgBfTqLwZNEsgO98h732s59RYiOSHYDo84l8//uMt7ER3sbFiw0AnovYtinfUa7HDRu4x1tuATA9GidSHWnNAMwGgLRtjOO1axlDPI5TcdNNGJHpDFwF6AoLDYG5ggXatEVBvdEykjOVJWs2pIKGBQXoxg0byDh87jkM/vHjAXdWrSILVd+fTbbmKEheF51AaWnh7J0zx3BT9faaAKeTGH/TJojily4lq+fgQc5qZxBUQcCKCs5ypxw+TBDD5xP5P/+HveMUJdYvKjJBtS1bWMcXXICutG3DlRiP897BQQOWKR1BJGI685aWAjzl0qE9nQwOivz5zyLPPsv+WbUKHtCyMtN1VLOmCwoAGWpqUvMKapMYzZ7MNvtwcBA7raODa9TVGf4//bzySSvAqlQNbjeAQG+vAeZqa40dEo+PXnVGJJKYYegsS3a7eS7V1cxdaSlztn8/QNL+/Xy+pATAcMECAIkTHPzN66ITKAcOQKmweLHhI3z1VWhali0DqFd54QXKm888E4qU3btZt3PmsPbXrUPPLF5sssSc0twMh2FrK3bYe96TuPcGB7HJ3G7O+oMHCRb29gLQ6fvDYfSaZWG3pUqEsCwAqKeeYi+uWAHwVFU1MjVVKtHy5aIi/n/nneyVNWsIrI7U/T2VxOPoA9WpTlqI4mIDLuqe1H3o8aCjX32Vv0+eDCDrpGbq6MCGKysbDpb29vJZ9Q+XLeOZuVwEMZXvPvksOd4SiZAxunEjNvTgIGM/5xxToqw8kfX1/J6pom4UJK+LjkFO13LmAhHpFdDeObZtH3j99UoR8Qgoct3rHXTE5XK5ReRfROSDIjJZRA6JyI9FxC8iv5TX01NdLtdMEfmiUNc+VaiD3yciPxWRe+yRJ/P0m+wU0tVlOgKnaniSSpqbcf4rKvhcVdVwHkONPtfWpo6+9PdjuI8bx+GWzmjs68OgrqriwEl+n20Dgh48yO8zZ6Z+X7bi93N/kQjAlWbIVFczhqO5bjDIATs0xHWmT08NqipgoFF7zdZRR3/8eMagpL+Zuigq91tXF9fRjpFqrNfUnNwS5lQSixnAU7M4Ghs5rI7RYc82VT6vi06QvPQSmTiXX47RJ0LHwKeewkC59FLz3h//mJLgm24S+exnMcaUI+bQISLR9fUi11yT2nh96CHKiCdMoAQnU7ORoSGRb3yDEt3zzqOUWQEEBRC14Uiy2DZA1F13sX5vugkj2+1GV+aa3aINAvr6iNz/+c/s2Xe/mzk7GmMtOQswG4c9FAIQve8+jPXqau5rzRr0fzI/YiqJx9E9+r1qROvK1zJDZ7lztgGDbMqS9ZpOPsbOTviSnngCYMiyWFOXX87PkiXm+lr+OArcaHlddIrJwADgeH09zrg2Ozt0CPtkxgzz3ldfRTcsWIAz39KCzpg1y+zveJzXXa7hVRM7d4r8+7+zlj73Ocq+nBKJcGYXFAD2aaODceMIZGjwT+k9tAux6ialBnC5sGWOHOGaEyaYKeMfNAAAIABJREFUJnT6czRnaiiEjtuwgTFccAGZUcrRp1yChYWMWRvIpdrLqotyyYS2beZHG4goz3a60knt7Op2G30ZCuH0h0LoPwUPnZ2eRRKDH9naSvF4YoZhKJTYjKCkxICFpaXmHAkEeM5arh6Pc2bMn8+PBlNHWfK66BSTgQHO2dpakeuvZ902N2NTTJ2KztA18/e/w7M8dy7AXns7e+OMM9gn69YBaC1YwF4866xEG2TfPqo6/H6AxEsuSRxLIEBQLR4n6PrnP9OUZMIE6GTUjgqF8G9EWKepgppKNdPejs475xwD/Dc05KaLwmFArSNHyPp77DH8hiuvBAidNYu9nsoWTCeaHawVT2Vl6YH6eNwEQpubKTs+fJj9vWABAUcNiid/rqXFcJe6XOhLLVsuLTWNPwMB9KbqNG2yUld3/BuqBINU0bzwAv8Gg+hHbYoydSr6MxAwXe6nTTvm7O2xrovGhJyWIOIpLKf9ZHs8OFK1tZlLiZ1y+DDRUeXgKyoa3ixlaIisu8rK4V2aRVCIR46YjLp0B0hPDxGa6urUB83QEEa2z8dY5s07+tIOLV9ub8fo1rLiqqr0RnA212xvZ44LCzk4nBkNqUQzGFpaTEOUxsbELtThMAd/WVnqzCafD2MiFOI9kyebwygeN81UamtPbnq8SijEgaRdbl0uQxTv8zF+zUo7SjkF7vKY5LTSRa2tGKHTp4vcfjt74+BBjMxZsyjv03X5q1/hdF9zDUTf/f2siUmTMOAef5z18a53DTdibJuy3699jcjugw9mbtq0Zw9GdUsLGY8f/rDZ95q5lw5A7Owkc/H55zGub78dvWjb7L1cM0eCQa75+9+TZRmLEdn+0IdS69RsJB43zQqyyT5sa6Nk+be/xdidP59y8He+M7G5RKpGK8nfqRQMyU0SnBxlyV2e9fVkQHGksuRk0NAphw4Z4PDVV3ltzhyAkMsvxwlwAgnHoWQ5r4tOIQmHybBwu3GQ3G723v79rPHZs82z372bpiGzZpEBrEG6KVNMhoie+VrZ4cw4ef55GiCNH4+eSba5tNmZlvy+8gpn9axZZKXo/orFsH0KC1NTI4RCBHm1Ad3s2egMZ7MXZ0ZeNhKNUr745z9je5x5JnygxcXoYw10Kr/hSAFX1Qv6uZH0YyRiSpbDYewaZ6OUTBIOM1/6bAMBwzNZWZl6nE5AUfWLUxdoBYkzu1C7t6qUlLCGFDRM5nv1+Thz9uzBHrZtwB5tjKJAw3GUvC46hUS5oIeGqJqormb/f+c7rL3bbjOJA7t2UVI8dSq8zuEw66ihAZ2wbh3vW7aMNbxoUWJZ7+bNAIeFhVzf2YFZhPX81FOs6eXLCeS+8AJA4+23G39CEySUYzXZBotGoaB5/nl8lSuu4LPKZZ2rLdPTQ7nv3/7G+Pr60ENf/CIgqfJGR6OMpbp6ZPA9FmM8lsUeHYn3MBwG+Nu0ifGUldGsJlNDGJVgkPOhsBBdf+gQ+uuMM/BfVR/39/MM6uqMrdXdjR6cODH7su9sxecjsL9xI+B0NMq9nH8+gaJ58/j+7m7WU1UVa238+ET77xj01VjXRWNC8iDiiZXTerK1YUimEuFkOXAApTdpEsojFjMdA1W0OUZJiclSccrgINcoL8c4TuecaYakcns4xbIwupqbUbpz5hwb6azyBXZ1cS+TJnH4pCq9yVb8fq4ZCjFXyZ0ZnaLcQX19PBeXyxjjoRB/EzElQQUFvC8UQpnrgTI0xNwHAmb+U2WBKkBXVHT0DWeOVZRIPBAwpUPKA6QNDdRJ8/sxllIREmcpY/2AOm10UTCI8RsKAe7V1GCI3X8/Tt373mf0ycMPAzZefjmlKqEQ66G6msj2hg3sq9WrhzuS0SjlPffey9//67/SlxLbNgDjXXexX+68k0i5SiYA0bIMwbhlEQm/9lruU8GAXHSIbRNEePhhfoJBsgA++tHU0e1sr5lt9qFtA6qsXUsZk8tFqeKNNxJhT/U5Bf6
cYJ+WKDqzjNLpPyeQmHxNbYbgbM6i73NmLTpfT76f3bsBmx9/nOwLEUpRL78czqnZsw2HnGZxHceS5bwuOkXEtilB8/lMll80atbI3Llmvx88yD5vaKDRQTyOHVNZiQ5S6e3l3NIKDf2eP/wBHTNrFl1Ck8/lWAzdpuD4tm38f9kyzj4V5QhOByD29ACAxmLoi6lTDWDn3Ge6X3SdZ9qbmzYZntrp03EqFczQ5nI1NdlxvTqzD1UXZQLovV5Tam5Z2FJNTYAP2e7NWIxrDAww7vHjR+aRdoqCiWqvhEKGv9FZIu3MMEzXHbu/H320e7dpRlBfD3C4YEH2wfxRkrwuOoVk/XrWxbXXss9CIWySvj6CDuoHHTwo8i//wp771rdYd6++is0/cSJ6yu0GXPN4uJbTdnj8cZHvfpd98J3vDA/QRyIAfxqs/f732Tuf/CRj030TCODjFBZyjWRg68ABgqD9/dhTb3mL6TTc2Ji5kipZbBs+vv/9XzIPe3sBDe+4g/tMlkDA+E2Vlamz9zQIEIkYjvlMQZW+PsDX7dv5TEMDNtGcOcZOKSzkOaSz+cJhANXXXuP5LV0KgJgqAN7Xh56pr0dPxuMmyDRp0rEHNnt7AYY3bmQ8lsX6efObKTVfsMA0Mx0Y4HsnTOC+nVnfyq2tevAoZazrojEheRDxxMppO9nKX6F8htkoo717Ae2mTkWR+P0oHKdjHovh3BcUDM9OFAHkUv6g2bPTK9rOTg6/urrh4KDXS8QtEOBvc+YcGz+Mz8fBrQfmlCmm2+bRSDxOxLynh/ucPj39YRmPc8D293NYFBWZDM/kzsQeD3NeUGBKhHw+rlFSwvcpJ9LEiSNnGSo4WVp63DksEkSdIOUFUhLxVBmkCiS2tWHAT5+e6FDlIGP9gDotdJFti/zwhxi8t91GdHNoCMDKtgGq1Dh5/HEi3itWkIlYUMA60Ajus8+iu97xjuF7dWCAUuK//lXk05+m1Cfdfvb7ASqfeILv+sY3EnlnMgGIBw/y/tdeI2L7z//M3lSOrVzpD0IhsjHvu4/9fv75cKbNn5/9NZJFwTE18NLp3EAAoGPtWkAILae64YbsuIU0Y0c7Njs5D0fSz8nZhMllycmchsmdVZPFsoimP/44z7W1lfe96U1kHL71rRjCzsYyOvZMczRKktdFp4js2YNNs2QJ4I1lsfbD4cSGFS0t8LeOH08Jf1ERe9+yEu0Yvx/bpabGZNhEoyL/8z/oq2XL4DFNDmbE43xOO3Rqk5blyxOd30wAojZO6e9H78ybN9xxVjBRQTwF6tXxdTq/to2z/NBDBGzr6wEPtVxRgcNcOF61mYnI8Ixkp1iWKVlWLuqpUwEecinl0+6q2jRFBPAwmwqMWCwxwzAUSmwMVVRkMg3VfkmXidPVZYDDnh5emzLFAIcjVaccR8nrolNEtm4laLdiBQENy4LuZedOGp1o6XBbG1l3JSWGakUbQtXXA7CVlpLx19rKWtfP2jaVHffeC+j31a8OD0zGYmT69fQAYj34IGv1a19LtEOGhtALbjfXcp7xwSCA6CuvML6rr8ZnbG423cRzKTUOhUT++79pHtPczDi++EU6wI+U7ezzmQYp48YZnRONMk7bNrov1bVsmzNh82aCRoWFzOfy5cPtokjE0EW53VzT2ehKdUAshp2rXYwz6UHVF/X1huO2u5trZ6qqSSft7aaj8p49vNbYiG5fsYIxKVjZ0cHcFxdzr5Mnp7flRgFIHOu6aExIHkQ8sXJaTnYsBoBo2yjzbBy83bs5vBobMeZ6e40xpmJZhqssOTtRxJQIFRUB/KVSNLZtCLnHj08kAI/FMNzb2zkk583LnsMx3X0dPMhPURHOwKRJx5Ym7vFwyEWjXGvq1NROrjZJGBhgHBUVhu8w06EYiRguisJCDuLOTg7Kmhrma/z47CNUQ0Ncq6Li6DpN5yLJJctaij3SgRONcs9HjvD/WbOOKlo/1g+o00IX/fGPRMnXrCELLBoV+fWvcXzf9z6z3597DvDvzDPJ8CstZZ2HQuz/TZvQIVdeORzwOXQIzsDDh4m2f+hD6UGhnTuJ8Le3QxL+wQ8m7h0FmJIBxEhE5Je/xCCvrISn6G1vM13Ei4tza1xk23AP/fznGP6LF8PhuGxZ9tdIFuVI02YF6bLqmpsBLR9+GGd70SIA2CuvzI3jxhnV1/LEkfgWnWCh/qTKLnTyGDrf78xO1IypJ5/kp6eH71+xgmdz2WXmvEjmPjvOXZaTJa+LTgHp7AQka2rCKbVtdIbPZzi19H3338/v738/Z5ZmZjg7kEYigI0lJYaTeXAQIGDHDrJ5r7tu+DmrgJnXyxkXCGCLLF6cuB4jEROccDYh0/JptelmzsyOY0z1gzq+yvVaUkIG0e9+Z7gYL7mEcr3aWuyMXClj9LsUsEynF8JhU7IciXCfWrKci2NqWcyn18ucVFWZigufj3t0gpGWxXcrWBgMovtFGKeChVqarDaiZkorICtisrE7OgzHoWbxNDUBGs6bl75L6wmWvC46BaStDbto5kyoQlwughbPPIOtdNFFvK+nh0BlLAaA2NDAZ5ubWaN/+xt2x7vehV9TUIAN5Xazvu++mwzDs88WufXW4QCiZRF4PXCAs3TXLqogvvCFxP2iDY1KSvAhdW9q4OGxx9hDF11E9qHPx37Q9+eS9PHMM2Rdbt/O/d5xB5ngueiDUIgxWBb7t7DQVD/p76k+8+qrZKoPDDCv55xD9mOmQIZmWjsbRbW0YGuGw/jQS5bwva2tzEmmpqKxGM/d7TbZ134/PpFWy2US28YmVuDwyBFenzsX4FAbdYlw9rS3A1JalqERGz8+u2D4MQKJY10XjQk5Xbsz5+UEiW2juGKx7AHEnTs5AGbMQAE6uwc739fdzXUnTRp+3VCIQ83txkBOByC2t6PsJ0xI5Mro7SUTMhplDDNmHFu2iNfLfel3LVhwbCBaNMqhqt2P58wZftBo05S+PkNIW1ubyHkxkhQXM79+PwaqprbX1+O45FqaXFHBYar8SsdIjDtMLIt7DQSMA6GEx9k67JqdKcIaOnDAHKh5GTuyYwdZLeedRyaYbdMQpbsbo1cBxC1bKPebP5+sxdJS9mswyLN/7TX26+WXD19DL74IGBmP4/ivXJlaT9g24OXdd2Mg/fKXGNtOUQAxuZnG1q0Y8IcPi1x1FSXT48ZhWMdirO1couxbtoj86Ec4m42NIt/+Nk77sZTRjtSswLIoqVm71nRXveIKwENtLJGtqNHs5DcrKRmu40fqlqyZhem4DFO9NxBg/I8/ThbH4CBzf8kl3M+llyaCuckly8fSYCIvY1f8fs7/mhrT2KSjg/NZu4WKYHf85jesqRtu4GzXxmf19eaM1wCqksy7XNhJP/wh+u2978WhTrYx1G5qacF+KC0l+zg5wyUcxoZKBhCHhrADfD5sCWf25EiigGFJCeP3eHCaH3mEc7aqij20atWxcXA5s5KLi1Pr44EB7l/LubUJTF1d7rpocNB0rNbO2E57tKyMvyuNSijE/KoUFRkdrsBhujEoYK
ilhvv3o8f37GGNFRZi715wwbHbmHk5PWVoCNCtuppgl8vFWfbMM+w9BRAHBqioCIWofmhoYK23tLCWX3yRtf7udwMaxWIGQPR44E3ctQu76brrhgOItm068D79NOv585+HCiaZx7OlhX3h9MMGBmi+tmcPOvTmm/FVOjvRo9qBOVu7/9AhqlWeeIJ9/KUvkZF5NJz3ykeqTToLC9ExqQK9PT1kHb72mvE3L70UezSbsTupW/btw14MBHheF1+cmHVcX894PJ70XZfdbvRgX5+pzqusBKT0+fiuZHtTE382bsTOUz9x8WIy4d/8ZpPFqGXT7e3/j73zDo+rutb+nlHvxepy701uGAwGAw7YEAIxSRx6IOFCEkJuOvdCkhsSUkhPIPlSSIMQSighgAOx6c0ONi64yk2yZVm9jDSaXs73x49195nRjKQZjWTpZtbzzCN7yjn7nLP3u9d6V0PPtlp1ynKsDVxEN5PsnQQ0oUtKgiX5SJIyLGlqwhifOHFwMA4GAdK2NhShyZNRlFNS+hM4nZ1sbiUl/Y/r9erOydOnR+9q2tiI4mXuEu3xoJhJk5aamuGl3rrdbIDHj3MdNTUDe4GGIu3tjN0wuK/hdSAlFVlSltPTMTSKimInQgMBNuSOjtCITpeLMUi3s1gkL4/f2e3Rm0bEKpFSluPpTisiG6lhQCQdPMhYhxOJmpTRk85OInKqq2lWYrHg8T58GKJvxgy+t38/HvKqKiIQc3LAADFwjx1DEbrggv6G3WOP0Qxl0iRIwXnzIs/l3l7SeF5+GbLprrsi1ycLJxD7+iD7/vY3jPx778Xgl5SZQEB3cx+KHDpEp8MtW1Asb7uNtJ/hOEdEeRMyL1yJ6+vT6dLHjnHeW2+F5IgnNSacPJQIIyHrlOrfLVmiCs2EoVnMqc2RjPfeXoysTZuYQ243z2/NGl5nnaVxRsYiEUNKaYwba53pkzI64veDJSkp1KOyWNhP29vRX0S3sdlwNFitEIh5ecz1piaMNvN6aW3luNXVrIPaWvBOKYzpefP66y2SdbFrF3O6spIUuXCiSQjEtDT9WTAI6XbiBOebOzc0a2MoYibc6uvBw/37ITOuuQaD10zW+/1DK00gIjVRJRonPPowGIRgaGjQ3ZMnTwa/4yHbJDpHHDlFReCA3891SoShx6PxWtKxc3M1aRgLLvh86La1texl8pxmzIDQnTZNY7DFoqO0RyniOSljXIJBCESPh1qDGRnYXI8/DgH4oQ/xvb4+dJbubv5Oncr8PXwYDDlyBJti/XqdmTR7NnO7vh4Csbub8iRnndW/BqK5DvL27czdu+7CvjCLlBbKyiIKOyVFZwBs2sRxLrkEvUgpvbZLSrRzZTBpayOD5IEH+P+VV9KBejhBA1LPND2d43i9kLfBoMblw4chDyVFe8ECyp/EU2v/5Elq2vb0oJusWKGj+bxejYV5eWBrV5fGn0iSkcFxJLpasgC9Xn4rY9yzB+Jwyxaed2oqUadXXskYzEEmfj9zpbmZ+SdRohUVwyP/zI0IRYdOytiRJImYlLilrU3X/RuMiJPC3h0dbEaTJ+uCuBUVocDQ08MmV1jY37Pj80H6BINsSJFIpGAQAszh0OSaKNhSd2j69OF1qpM04IYGNsLS0sjFbGMRt5sNx27nfk6ZEroJuFyQJ5JSk5urC9LGeh3iLZLOWAUFPEcZf18fBsWxY5qIHeq1WSwYDtIJubAwPuCXdEank+ceS8ryUMQczn/wIIr7ggVjJi0oKVHE54N8CwRIGc7IAFu2bSM1RFJ26+pI4S0oUOq3v2UeBoPM+S1bUHiWLiU9xiyGQSOUu+9W6pxzMN5LSyPP/z178G63thLteN11/ddiJALx1VeV+tGPWIPXXIM3NytLG6hKsYaGMs9PniRt+cUXOcaNNxI9GUv6c7iYa/tFij6sq4M4fOop1ufixRRsv+ii+JwGfr8mCKxWTZya66xJ5LG5dqE5LTmaSNdTiRZUin3oxReJONyyhfOXlRFRcdFF1JAy33tzSpGQmampOkIgGXn47ymGgZHucpGalpHB+m1q0qlbSrGHPvooc+faa7VOcvIkn0u6slLoFQ6HdqC++SbGeEkJ5GN5eeQMgWPHMNwNAzJz/vz+5JJEyZkJxJ4eHBAuF2tgxozYiD2JpOzp4e+WLZCH2dlg28UX68gWr1eXKPD79ToWMjHSWhIsihZ9KI7cxkbWaG4uulhVVXx6hxjhUlNa6k93dHCu8LTkggL2B+nwHEtzBxm/RBwePcp5pbzO3LnoquFYJAa1+b6YsTEp/57yxhtgyiWXgBeNjdT+mzQJ54PVynz79rf53te/rusS1tWhB9fXQ/585COs7aYmHBKlpZBiP/wh8/PjH4c8nDWr/5p95RX0G8kK+fKX+xP53d2MIScHW8dqRY966inGPWsWUYtFRbpRpdut02EHk95eOkD/+tfg6dlnk7q8YsXw7rFgqMXCNaWlsSb7+sCIN9/UeJqfT9ThkiWxZZOItLfjoJLIy7PP1k1LpWSCxwOeZmQwFsGi1laeezQHg2SN9fWBLzk5nOOllyB+9+8HCzMzIT9XruRveDShw8EcaW9nTAUFYFasUd8DiZlIVCqJcWNJkiRiUuISmw0DuLBw8OitQAAg7OpCuauuZgNxudgMzMa5w8FnOTn9FWWpYej3o+hG8rKIR93lYrMpKAAIDx5EyS0sZNOMB9BlDDYbr+ZmwFeuKV7ANAwIjaYmXSRYvGSGwbg7O7kmiZYrLo6PsDQM3aFYFO6Kiv73IzeX+9TYyPl9PjaY8FSeaGK1cu+7u/n9UAqPi4SnLKemshlnZSXeWE9JYQ7Onk1qxr59w49OTcrIyl/+gqL7uc9hUB8/Ts26adOIQlSKefvJTzJXf/tbHVXT3k6ETFubLvxsFrebSLrHH8cA/va3URQjpQ0++CBdBsvKaHRQU9N/rFK3S5prtLdDtr3yCkryj3+si5R7PLoMQF7e4NElHR141zdsYF18+MN4iCM1oIpFpFmCUqHRh4EARsqDD5LSkpZGIfLrriOaM95zud1aOZTOqvJ/c1djiSaM1mxgILFaIRo2bYI8fOcdjjd5MsbVRRdBvESKYjRHY0qJBiEmpTabue5iMiro30eOHWNNz5nDHud2815mJoaxxcK+/eijrO2rr9YRhx0d7HHV1Zo0dzrZ63Nz2T+ffBKie+5cooql1nH4/H/3XeZ0fj7pipFq/IrxK+lqfj842tzMeGtqoqfAmSUQ0ISh1AXz+3HkbN/O/L/kEmqghjvk0tNZ49IQRdaV1FGUkgBCKprrsMr7IuLEbWvj/+aU5XjE5eJe2O26uVxqKs9Nxi4NT7Ky+jdO8HoxyF2uwfVLhwOdVEibYJBnvngxc0lIlUhiLpsgGCRkYpJQ/PeVgwdpPLJ0KXOop4fsi+xsMioyMlhL3/sepPV//ZcuudLejgPi4EH2wQ99iO8eOQKmTJuGnvG73zE3P/IRdJQ5c/rPsb/+FQIxJYXow0su6Y9XnZ2stbw8iK5gEJ3otddYX+vX6zIoLhc6XiCAb
TSYbu52K/XHP5LZ0dHBca66SqmPfjT28kxm8fsZSzCoSxTIdbW1gb+7d7O2KyvRLZcujc+parNxLIlSP/10noEZE6T7cyCgo8u9XnCqrAyCtr194MjH/HzmyXPPQRq++y57UFqaLhO0dGn/bBhzynJvL2MpK8PmHqkSC0kicWxKkkRMSszicEB65eQM3pDC7ye9pqeHKK/KSkCqpwelybwheDyAfkZG/1DzYBBly+PByxEJqAIBlEqPB8U8N5fN5/hxAGjOnKF1BY0kojj39fGSCLvJk4cXueZwYHS4XCi/kyZpJVu6LPv93JPKSpT8eI3U3l6em8fD/Zs4ceBoJauV+5idrRumOBy6qPhgUVIpKdwbMTYGu08+H8d3u/l/RgbnTnRdxUjjLC1lYzxwgMiSxYuT9YbGorz+OormZZcRcdjZqdTTT2NYX3YZc7a9XalPf5r59Ic/sKaUYv0++ywK34UXEm1mlo4OiMN//Uupb3xDqZtv5njh86Cnh8Lcr78OafnNb0aOPjETiBYLHvZf/AJF77Of5VyyhlwuXmlpg0cW9/URBfjEE5zjwgtRkONN2xORaLtgUEcHWSys3SefpCbkiRMoi5//PKlMsXYBNRu9Qh6aI5HCG6CEi6QSD1WBPHoUEmbjRhwEhgEh85//SS2nOXMi32tJV5aoo4FSls2RkuHNEJKE4v9d6exkflVUoAeIkzMlRRt8Xi9lEST1TyITnU5wqqBA74t+P8699HT219/8ho7gq1ZRbkEp9CLznBJi/+BBXWsrEoHlcmkDMysLrDtyhPcmTtSphNHE59PEoXQmlii9PXvAQpcLY/eyywYuZSAd1lNTQ2utKhUa8SvkvOgBqam663RDA+NIS2PskybF7hiWbqqiF0n9roICdBwhDOU1GOakpzNWaVYTTh709OiMh4YG3isqwlifO3dozWvCxUwYKhXa4Emwy0woJiOm/29KZycOMqmT5/FAIDqd1CEsKGBO/PjHrNfPf15H5LnddD7evZu04csvZ57U1jKPZ85U6r77SJM+/XSlLr2UuTV7dqhuHgiQvfHQQ6zJX/0KWy1c2tvBufx81u3x49RN7eggYu/979fRblIvUWqBDlQyy++HwPzxjyG3Fiygsd355zPueLOXzA3erFbGlprKXl9bC3l44gTrfckSovWys9HTpLP9UPWyvj6ez/Hj4MnixdzngbAnJYXj+/2aTLRaub+9vXwWTrzabOi5mzfrGosFBewfq1axnzkc4JOZQPT5dMqy18vzmDYNonI06hVaraHlbJJE4qmXZHfm0ZVxf7O9Xkiv1FS8QgMZST4fSrDdTqRKeTnvSQRfZaVWavx+3rdYeN8MDkIgOhycM5LB7vejmIlSHAyisDkcGL0zZ8ZXyFvSdURx7usDqHNz2SjjLQ4eCOApamvjGJMnYzhIJEJvr+4COGHC8FIThfR1Otn0KypiS7nxerXC7vOFplsWFg5uKLvd/CYzM3Idp/CUZYn8Gu0NQrpa7t+PorBkyYBKy3hXx8cdFtXXU8tmzhzSY9xuFFZJERSv6ic+wXy/7z4dIed2E0HX3Ez03GmnhR770CFd/+c3v9GNWnJzQ+f3rl1K3X47a/RLX8LDHckwMxOIjY14/3fu5Lxf/aomNg2D9SnpKAMVnna7qZ/40EOsp3PPZcwTJ6IADqf2qDmKRaImDx8m6vPppzn3aacRdbhmzdAURkkhDu+Y7PXq9O6MDPBvKGnJckwhHaI1uNm7F9Jw0yZdO3fJEqIN16wBa6OdT4xvs5IaS82xcDJRqVEhFJNYNMridmOEZWTgjLBYIBTdbnQGX4xUAAAgAElEQVQNifR77DEMzA9/mMhjpZhjdXX8Zvp0bRhJOm5+PtHTDQ04BxYt4v2SklCj3W4nqrqlhSjCs86KPKdlb83I4FxHjmCw5+RgoEaL7PF6ddZFXx/vZWSw5+fnQzr84x98Pn8+kZKCa7GIOBXMZKJE1yjFmF0urrOtjXtVUMA6Di+FE00k4lnqGMq/7XbuT1YWemJZGf+OF0ul2Z2MsbMToqG2lvErxTnmzo2v7mQsYiYUzVHcI0woJrFolMXjwcHn86EHZWWBH3v2EIG4cCHP/557cMB+8pMQdUrx/sMPU4Nw1SoiEFNScKZ3d4NPv/0tEcaXX853urrAOLMDsaNDqS9+kZIy552n1M9/HpnUb22FRCws5PebNvGboiJSlwUj5ZjNzRxn6tToOodhgEN33w0GL1gAGTZzJtgcicgcqvh84EQwqJtGOZ3ocjt26LJbp50G4We2FyTwRGoWFhREvwa3GyfnkSPg3ezZZKjEY1+KE0ZK9xgGtmp3N6Um3noLG8cwwM+zzw6tsVhSorNmPB4wyuvVKcuGwTVXVcWWYZZIkTI3Ug4jiox3LBoXkiQRR1fG9c0OBDDkDWPwTsxeLyDrdKIEl5TouoR+PwAkgCqdCAMBCETzcQ0D0rK3F6UxUrqNdDL2+/l9W5tudT97duzRMnJeu12n66SloSyLMm8mQGOVnh48TV6vDgG329mcJWW5qIhxx0tSKsWxWls1ASi1lOIZt9PJ8XJzAe7ubjZQ8dwXFAx8XIlklILjgYA+ppAt2dkjk7Ici0hx9n37MK6WLYv6DMb7BjWusMhuJ+JPKf5mZeF1bm+HyKuoYI7dfDOK2K9+hUdYKdbZAw9gzH/oQ6RnmOXVVyHHMjNJO5w3DywRj7NSzIv776cWY1UVdYHmz488ViEQDUOpRx4hGjIjA+//ZZfp+R0Msob8fh31EkkCAZTk++9HsV6xgrTlSZP4TX5+/GvGnC4ohuUrr0C4bt3KuC+7DONE0q4HOpaZNDSrFuY6XikpOq0x3jFLtKSkPm/frolDada1YgXE4YUXhqb0mOsjSlqyOWXZTKQOB4sidY6WMQ+VNB2iJLFoFCUYxPB1Oplj2dnoKD09RGXk5/Odv/0NLLrsMgxbkcZG8GzqVG1oS33pYBC8cDqVuukm1rjTSZaCOZqloYEoErebMURbm0IgZmaiX0jq7JQpuraWWdxuTRw6nbyXlYXeUFjIcXbtUuqZZ9Atpk2DPJSO1MORQAAMl1qJOTncE0m5DgR0tsaECTp6OZxENAyMXzNZ6PXqz1NT+VwyPEpLh+YMHao0NoJHdXU6Fbq6GtJwzpxT07zNHFltxiJxkiSx6H9lXGGRYZBdUV+PQ7G6mqyBF18k8nn1ar7zu98RbXjttXxP5IknIBbPPpvvW63M3+PHwbH77kNvuuUWyMjGRnCjulof4+23cYy2tSn1sY8R+RhpLTU3Q6oXFYEvGzag/6xcSaS16NmGAWHV1YVdMXFi9LX5xhs4lnftgoC88krdJOTcc2OvTyoSDIIbPh9rJCuL69u2DYI1EAD7Tj+dCMmB1o/LBd5LyYKcHP19n4/jHTzI5zNmsFfEW27LLD4fx33+ed3QVMplrVzJa+rU0LF0dOha8cEgY2tu5tlImnRVVWLGN1wZApE43rFoXEiSRBxdGbc32zDYWNxuFNCBQMTjQYnyePDOiNLU3o5SVV6uf28YgJvb
zftmQ9owdOOSiRMjk4E+n66XkZ3NJifpzOZOdrFcp8OBQRAIaFKrtZXPJ02Kf2OSaEnpnFVVpbthSQ2eCROGr9B6vTo9JyUF4I9URylW6e3lGqThgzSXcTo5T2EhxFu080hEp7m+mURfDYcsTbSIErN3L9e0bFlEwmO8b1DjBouCQaV+8hMUoq99DcVnwwb+v24dyqPHg9d91y6lfvYzFEileF86B69b1z8C8YEH8KDPmoVCLZ32srL0nOzupgD5W2+RAvuNb0SPDJbmIAcPas/4hRcSOWnGr0BARzdHm/+GAcH5+9+jyC9YoNR//Af4K1HKw1HmzKmEDodOWW5uxkkiBkekOkLhEYZmNcJcH1CuVWosmtOWhyMuFyTKiy9SCLyri3u4ahXE4fveN3D9IzOpaU5BFvIw0WKOxDR3ijaTisOQJBaNouzfTxbBkiUQUM3N6DBVVbosxrPP8r2LLgp1Wths7C1lZbpkS28vv29uJnIxK4t0+4ICPsvP1zpHIADGHTzIOjrzzOjRf0LIie4mNaFnzQrFDadTE4dSSkRqUhcW6ujHgwdJOzx+HHxYt44IyEQQUFIjUUj9kyfBPIdDd26dOJE1Lh3cJV1XohgFZzwevcZSU3UNw/R0PpNMCinLMtz1LnqqpCqLnlRZyf0ZazWWhVAMxz5zw6phSBKLRlG2bkUvOf98cOb113Fcrl4NKaiULn1y+eVKXX+9Xq/PPovT7bTTSPu1WMCAfft0yRSvl8yLiRPRZUpKILqUYv78/vfUH8zMJMLxqqsirychBTMyIOL278fx+6EPhRKSUpKqrw8sjVYua/duyMPXXgN3v/AFxiZRkqefHv+6luZPUrKhro6U5aYmMGTRIu5ZLMEpwSC4I6UOcnLA0f37Od/kyYnBCcPAcSURhydOgImVlUSqX3TRwBmEbrfuxi0ldpxOnv+CBWMvfdhcqzrCNY13LBoXkqyJmJQhSVMTgFJdPbDh6nIRgej1sqmJIdfbq2ssmH8v0XfSidAsJ0+yqVVWRgZsrxcglu5Uzc2A8/z58dUpFGVaPNQTJuA56+yEoJw8OX6yq7MTQA8EuCdpadxTw8BAmDBh4FTGoYjPhzHS3Y1CIIZKooA/NxdDxG7nGtLTIX49Hp6jdI4uKgolWQyDZywKvITR5+ePvU1JKe5dVRXPav9+UheWLRudmh9J6S9PPYVie+ONOAakBth552EQ+3x4v3fsIG1YCES3m2jF48f7pzAHg0rdeScpPhdcAJmYkcFvJMVWKZwht9/OvP7a1yDVIhnNUsvL4SCS6PHHUYJ/8hOILbP4fCjJFgtKY6R59c47pBEdPMg13303DhmHg98VFcU/H83Rh4cOcY82bGB9rljBda5erddmpLRks5jTdc3RfWLsK5UY8tDpxEjauJE0zr4+MPN970M5Pu+8odUekogcSfOT+mwjWb/QXLvMTCiKMZ9AQjEpIygnT/KaNo313dnJnjthgq4DuGkT+4YY9iIej64lLQSix4NzdccOyPBJk6iXmp7OnpqdrQnE3l5IAymWv3Bh5HRYw9ARiJISmJJCtGBFhS7LIsShNDTJzdVReWan2fHjlDSorQV3rr+eNMFErBfBIuk0Kg3mJBJ8yhRdb8vcaEVKoNjt/E5qq2Zl6Vrbgq2SYtzZqaOBwq8xVgkEcEzV1oLR4kidMYPnPmuWLhEx1morC85IV1nBQ8FrcyR2sqbr2JXjx3GkzZkDzuzfz16+cKGONnzqKQjEtWs1gWgYRKe9+CLElbzv8TCXDx+GYCwqgqgrLCQiLT9fpwa3tZERsmMHpN2VV5IiHSkq+ORJ3Yl52zbm29q1RD+avy+lsqQkVaSss6NHlfr+9/X4vvUtSpS88w5r/JxzIMnikWAQO0XqNR84QKMRh4NAmLVruV/x1GiXjK2MDNLM9+/nPkyfjl43lIZWA417/37mwubN7A9WK2TnBz+IoykQ0CUsHA4dWWjWNXp72StOnGB/qKpibLIXid03liQ1NbQURlJ3Gn1JmsVJGVTa2wGYsrKBo/CcTozuYBCDXb7rdrOJZGeHknsSmVZQ0D+yR0LfpU5NuHg8eKykbkNKCpvH5MmxKz6SwiO1K0pLOYZ0eRaPWDwA5fGw2dtsusaahMlPmMDmNNwovECA+9DZyTmKi7lniSa9rFYUcyES5flK0xeXi82mvZ3vyOfmlOVJk3SNkbEM+BYLYzWMUCJxLJKe/5dlxw4UxvPOgxzcu5f0mUWL8DYHg0QJvvEGf6XWj9MJkXfsGASTuYmKy0Wq4LPP8veHP9SGd1oazgxJK/z1r5kH/+//RU/ZE8LsrbeU+ulPUbA/+lFSgMIdA4N1YK6t1TWIKipIE7rwQtabwzG89GXD0MW3X36Z1O0dOzjm5ZeT0j1rliYLhWg0RxlKxMpAKblijJo7qsZrjNpsjHXjRqXefJOxFxXR8XHtWkjPjIzB12WklGVzLcbRNJaThOL4lN5e1ueECZBFdjuGcV6ejqZ59VX2ijPP5CUixrTFor8rdZH/8Q8My6VLiTS2WNhDMzK0cXn8OAZtIABpUF3NHh8+PySTwmbj2B4P+sv06ey7EpHo9+v9vLIycr2utjbSlnfsAMfWr8chMhzyzSyCE11d6Hvi/CwvR4/Lz9eOx74+3YhJnBHSgC8zU9d7NHd8lvrV4rDJzh6evuX1QmLU1kK0SKOaWbNIVZ4xI/TYUk+7r294JSdGUsI7PZsjOoVQTHZ6HnvS20tH3eJiSLSmJlKWKyvBEKuVPfPPf4ZY+9SneJbBoFL//Cc4NW8e2QZWq05dffll9tn589GnMjJw4GZkMM8tFoiq734XfUn24Asv7I8LhgEhdfw4JF9nJzi0bl3/oBCnk+8ZBg6acL2puRnd6uGHGcuXv0zk45EjjKeoCGyKN0tMGpJIBtLRo9yTmTMpizNt2vDXb2MjEZQSET5jBlgXj5PB52M/2LyZ2rw9Pdz/ZctIKT/jjNCoRinxICWlpHFVaip7RUsLOJWayriys8GB9HSehdTFlw71Y0mESPT7E16aISlDkGQ68yBisVhyDMNwJOhw4+5m9/QArAUFurNgJOnrQ9lUCiATUjAQ4PdWKxucGGtOJ0pqTk7/Tn6S2jNhAh6pcHG7iaA5dgzPc0kJinWs4CaFw91uwKeggPHYbAC+EEnxbEyGQQr08eO6Q1ZBAQrvhAn8e7iGazDIxtzeriMcy8tHPjXY7dZpRpHueVcXBozLxYZfXh6qvPv93GNJgR7roC9pB2VlGHrvPbdRH/W/Gxa1tuLtFjKtpQVicNIkpT7yEebNt79N7bEvfEGpj3+c3/X1kZp78iTk4+mn67nX0oLXfNcuPNqf/rSuTZiSAm51dnK+t9+GrPra16JHCUs5hp//nEiiadNQvmtq+n/X6WTtROrA3NBAatCrr4INN9yAsm0YYPBw05cDAe7nE0/wamvjPl59NSlFeXmR05LNnZIHW6d+v07nlrqH8WBcW5tSL7yAEfT224y9ogIy+KKLcFCZoyQHarRibtqgVOSUZXNK36kUc8SnjGm
wbtXvSRKLRlh8Pow1pXRkx5EjrOWZM5lPW7aQXrd0KfPULK2t4MqkSdq4q6ujzunJkzg/PvxhXQzfamW/CQbBqhMndDfTvDz21PD5IBF39fW6ZEpZGd/r7dX1SPPzMbrz8yPPKZsNgmLzZtbKhRfyGqgzaiwSCKAbNDXxkqZSpaW6drZkLohItLDUJ8vO5jfRosJ7etCLnE5wqLgYzE1Pj83QdLkgDGtreV4SWTh7NsThtGmDd7W22wdvmjXWxEwoCnYOkVBMYtEIizRs6u7WJOAPfsBz+u//Zm2/8QZlXZYtU+qOO3heUlt5+3bsqksvxSZTSjsva2uJ7P/85znuvn2cT1JZ77sP5+OUKehW5eVgQ7gdYBjg0GuvoTsXFIBxy5b1X3s9Pbq78dSpoZF+NptSv/gFulEwSNTkF74ABrz5JtF1s2eH6gSxiJSV2bcPgq+rC5xbvJhjDidCUKStDcKvsxPMXbSI+y8d7/1+7RweSAdxuXh2mzfrmrxZWei3Z5/NeAfSD71e7FqpcXv8uI76LijgmZaW6vvY2QkGS23+tjbGXF6eOEdSIkWcRyZ8P+WWZYKxaUzKuCURLRbLOqXU35VSVxiG8XjYZ9OUUnVKqbsNw/jqe+/dpJT6jFJqrlLKp5R6XSl1h2EYe02/+6ZS6k6l1GlKqU8qpT6slCpVSs1TSh1QSn3dMIzvhp0rRynVopR62jCM6wYZ9ri62U4nxm1Wlu5oGUl6e/G+W61sEqIsCZHm8bBZiSEvqcfp6f0j/Do6UKqLijhnpDFt3Qr4VVZCHsba5ETA2+nUYea5uaEFfXNyOH88YOl0sim1tADIVVUosYlIWVaKcXZ3a1DPy+M+JkrJH4qIlz8vj+coir/Tqbu8CqmgFEpGUVHoHOjp4f/xpJ6PttTX46mtqMBItFj0BpXEosSLxwNBaLORsmKx4IXOyVHqmmuYNz//OWnIN91ECqBSYNETT7A+zj4bb7vMrz17qBPU3a3Un/6EUisEoqTzbd0KgdjXRxrz5ZdHx5ZgkEide+9lvJ/4BEpuOIkvUTHS4MCscLe3M5bnnkN5vuoqSM7sbH7jcGgHRzyRxYYBNj/0EBEIfj8Fta+9lr+iMJoj8mKNzJNUOGl2kp4eu0Lf0KAbo+zcyXvTpmnicOHCgZ+DudGKUv27LEu0zUDHUOrUE4lmiaHTcxKLRlBkDXV3Y7BlZ0MsBYNE56Sn40DdtIkIHnPzJKVYxw0N6ABS4+vIEYxjux3nx7nncjxxCJaVsfa3buXvjBkYmWlpHCMSgXjiBCSA08m+LI7K1FRd33CgmsUOB9fw6quM5dxzlbr44sTV8zMM8PzYMXQ8r5d7WVISeg5JS87M1C/BPommlnqI4eva5eI5eTzcq+Ji/pqjo5XS5RUilViw20nrPHiQsUrJmTlzIA4nTYoNJ8R5JCTmeJRonZ6lDITpHiaxaITlhReIlvvgB5mLP/sZdsuXv4zNsm0bDtJ586jfnJ7OennmGfChuprowblzOV5dHc7a5maw6JpreL+2lrUwbx7YcOedEIKXXILjRBwM4fgQDOJwefZZ1vgZZ1BOJlId6bY2bESxt2Sdu1wQh7/8JTrd+vVK3XYb3zlxAoeNYeDQmTIl9nsozt9t27iXQo4tX46ukYh12t0NedjSAs4tXBg5otHhCC1vYyYC7Xb2gM2bdZmw/Hyue+VK6vLGYqM2NPBcpRZ3YSEYKVkx5oZ3hoE97vdDLkpvAIkWH0u6klLa8WEiEqOyAuMUm8akjOd05ueVUt1KqWuUUo+HffYeDKqHlFLKYrH8XCn1OaXUo0qp3yulCpRStyqlNlssltMNwzgY9vs/Kx7sXUqpYsMwai0Wyxal1MeUUt8N++5HlFK5Sqn7E3BNY0bEa5GWFrmLn4jNhqdcQqnNANjdjfJUWhoahdbaqht+mI8rdTPE4x4uLS06pW3+fF6x1KcIBCCuzF2FBTwl7djtZlzl5bFHyPn9EE1HjrCRSnMXUWQTIT09mpjNzuY+nQoPd06Ovp/SZEXqi0nEpdRf6enhdfIkioSQiXl5OlUpWqOKsSLTpvFMDx5EQTrrrJCPk1iUQDEMiLWTJ6l1mJ0NCWa1Eq2TkYGC+cADEG633srvuruJQHS5MIAnTdIE4saNKMgFBRjKixbpFGalwK3f/AZP+9SpeOVnzow+xhMnqL/4zjt4rb/+9ci1eKSgtjR+EqK/t5drevJJxvHhD5OGUlTEb7q7dYOXgQz/aOL1UufwoYcgT7OzUcSvvpq5HB5hGE80cCDAeYTAy8wcOnkozWc2beJVW8v7CxYo9aUvkSY1WNdDEas1NF1ZIiotFh3BNJgIVsnvxoKYyUIzoSikos+HUXf66SE/S2JRguXoUaIy5s9nLR49yjyT9NV9+5jDs2YR3WOeP34/Br5E5CvFM/vpT/neV77CcQ2Dc/j9kGonThAZk56OwS/px5GMt74+9qRjx5jvonMIcWjuBhpJvF46sm/ahP5zxhlcRyyNAwaSQADd6tAhdDjD4BqnTOF+ZmTo5ieZmQMb8OY1LanLsubNjQtKS0N1CtG/zLX/pF5rair3sK6OMTY28n5xMYb63Lk6Yiseyc7W9XJHuvbqSIk5+tBMKJqjRY8e7ReBn8SiBMvevbzOOIPU4N/9jrX1qU9BsO3Zo9SPfsRnX/2qbkT01FN8b8YM/VKKZ3bbbejnX/0q9aGVYi309vK9XbvQdQwDPcfr5bV6dX8C0enE2bttG8ETN9ygyUqzGAbrzGYDo8TG9PloDPOTn2DnrFlDJOX8+cy3d97RJSVWrYrPbqirg4Q8dIg5PW8ee2i8tRTDxW7nOTQ0cP+XLtXR6pEkJwcM7O3VmX/79pGFsXs32FZSgkNn5UruRSxOWolub24GgyTwpKZG68cS8CGd7DMywKriYl0qq7SU+97WRqCN1PUdKyKR0l4ve+yCBQN+PYlNCZJxSyIahuG1WCxPKKWut1gshYZh2EwfX62UetcwjH0Wi2WFUurzSqnPG4Zxr3zBYrH8WSm1Xyn1LaXUVWGHb1RKvd8IDdP8k1LqPovFssIwjLdN79+glDqhlHo5YRd3iiUQQIlVCkM8GmB1deFpycgglNpM6DkcurOgkFwCZtJYw3xcCWnPzQ1tOy/j2bePzSwri3D7WJQ6qU1j7spnTicW8tJqRQGP1fMuBYH37weEq6oAsOLixBmkfX0o4C4XivaUKfHX/0iEeL1sPB0dPEdR2sPJUosFJSE/H4VBGuxIZ8SsLK5J0pTGoki0Wno6/96xI5RETGJRYuXFFzGK169HAX3sMdbulVeybh99FA/1Bz5A+o7Fwjz829/AlvPP53tSJuG3v+V7NTUcS7DD6QRbnE6Iq3fewbt/xx3R52IggJL8m9+gZN1+O+nAkQxDv595I6nIaWngw+OPcwynkyi7G2/UEUoSoWsYmowfighx1tJCcfXHHkPxmzKF65GU5UTU2QsGGaekEYvCOZTf7d4NobtxI0q2xcLe8bWvYTBEKl8xlOMKoWkYukZiLIq23BNz5+SxJsePsw
fu2EFa04EDzLGTJ/V3kliUWGlvJwq9uprX8ePsX1On6ojEDRtYZ+vW9ceBpibm5pQpzKmtW4lALCjAaJcSMTYbhExuLjrVyZMQhosXh9YKlDnt8fCbQ4cgFQIBDNWFC9E7htpgaPNmoqB7esDHdesGLlszmBgG69DlArOPHYOokNIm06ejY+Xng7HRUpIHE6lp6vWiU9rt3PvSUhwx0Y4pkYtZWRjVe/eyjlpb+byyEkM9WtOaeCU3N1T3Gc9itXLP9+8Hz3fvZh56POyhIkksSqy0tlKzcMoU5ujTTxMh/ZGPgBOHD0P2VVQQgZidzTN58knm+qJFrLtZs9BF3n0XvUgpSLslS/h3UxO4V1amOzvPnQteHTiADvO+9/VvsrF/PzUYbTY+v/zyyEEe4lRwOMA0KdvwzDNEUNbXQ5Ledx8OFKXQo15/HZtz7lyIuVj2d5+Ptb5lCzpSVha1Is84I3GZUC4XdurRo6yRBQsY61ACSNrbGdurr3IflcL2Xr8eW0PqUcYibjfPvbVVN6qaNYv9oakJzBedUHBRamaLTSYNRjs6uPcTJvDcbTYwdyxgWXs7GLRzJ6+9e5lbR45E/00SmxIn45ZEfE8eVkrdrGBz/6CUUhaLZZFSaoFS6r/e+85VSimvUupJi8Vi5s49Sql/KaUuiHDc34RNAKWU+qtS6ueKh/72e+earJRarZT6nmEYYT0rx6eIh8jnG7gbcUcHCzc7mwhE8/e8Xj7PzNQ1JQyDxe719q+p0NfHppKV1T/cu7sbYGhsRIk/55yhRx9KjSC7nU0qJyc0LTAY1J3D4klftttRpg4fBpAnTADwJeIgEeJy6aK3EhV6quoISsqyw6G7IVZUsPFIalA0sVrZvMxkYl8fzyMtTdekGyvpPlIrpaeHsQaDjG/RIjbiCJLEogTIoUOQhMuWQRI+9xxr9NJLMW6ffRZF8/zzlbrrLuZVaysEYmoqyquQ2oZBpM9993GsP/xBG9fShW/XLlJ5XC7Spy+7LPrYamspKF5bixf89tujG5per+6knJ/PWP7+d2qgdXWRav3JT4J3IkNNXw7vlBwMYhA8/DCpToEAOHndddQtSlTki3RGlXIF5tSXaOL3Q5xs3MjY2tp4PitXcv0XXhi/R9sceWhulCLNX2KVsUQkdnQwN3fu5O+uXeCQUszhmhqlbr5ZG35hksSiBIjTiUGSn48x2NLC3iWNSI4dY01XVmLIh6/Xzk7WtJRyee45pf7yF/bwO+7QupE0TQoEWCtOJwbo9OlgmzhdfT50KJsNnaWhgd9WVbGewmtLRxPDgIh+5hmON2MGJSEkOikWkU7JLhd/3W6uubmZta4U45ozB6xLVLO3QIB7YLezTsvKNLZLKnOkTrFNTeB3bS2/Vwq9b/ly7rdkWCjFsUWvGW5TkZQUxudwcK/GqsM0ktjtkEf79/Pat489LBjUNUHXrSNCKoIksSgB4nKh+2RnU4Zlyxb21FWriB5saEAfKihAn8nL4zdPPMEaP+ccnldVFd955RV0mfx8GstJ1kVXF8EcPh+fHzxIk7ibbqLOot2O7mWOUnY4wJI33uDYn/tc1LmgPB5w0+fTmSKvvsq59uwBZx98EL1A9t+GBq7XYkGfiZSlFk1sNhxu27eDS6Wl6HixpgEPJF4v6+PQITBm5kzweyAHsGHo7tqbN0OcKgUG33gj+3tJic7sikUXsdl0WS6LhWdVVRUadFJWxnckwlBEyESfTzdiEX3UZuNVVKT7CUgjwtESKRW2ezfzZedO9mXJhpk+nSyWxYuHdLgkNiVAxjuJ+LqC9b1GvTcJ3vu3oZR65L3/z1FKpb/3vYhisVisYQ/xaPh3DMPotVgsTyqlrrRYLF8wDMOrCE+1KKUeGO6FjBVpbmahVldH92a3tbGA8/LwCJnBWOr6iFdYwK+7m01twoRQBcrpBEDFSy0Gr8+HR+fIEcB/yRK8w0MxiKUGmRQTl0Ky5nFKp0KPR3vDhgLUgQDgKQ1N2trwMp95Zuy1cgYSjwcDQtKFpa7iqTBs/X6ek8ulU5YlDcli0bUQU1MH31BSU9kcCwqYE93dofesqCjxXaWHKn6/Timw28+aKjUAACAASURBVHVtjaIiXTdzgPufxKJhis1GF+SSEgiSLVtQzs45B+Xy5Zepy7NiBYXEU1JQhP7+d+bd+9/Pui4o4Fledx3E1ec+R11FcxSPy0XK9IMPovT96EehhJ5Z3G7Shh56iGN/5ztEzUVb6263Xg/Z2Sjsf/gDZOiiRfx+4UL9fSkL4PP1T182N9uQvyIeD4bEQw9xn3JzSVe+5hqwNFFYIdFFQh5GqyVmvv633mJsL7/Mc83MxABYu5Y0qHijqKUmmtS+CU9ZlrRmUSpjlVNBJLpc7KdCFu7cyVyRBjWzZzO3lyzhNXv2oM6WJBYNUwIBiHmLBYPEZmM/Li7WBtiTT/L/K67o/zzcbnSDvDxef/4zEdYLFlC/VaJfXC7WfksLzzwjA2KgqIj3pCmHRHpJ9+WuLq1/DZQqZxbDACeefhqioKpKqc98hjENVfcRolCIQyHclELnEuI0NRWsmzo1sXpLMKidkEqBI4WFoam2kpon5QAaG3WNQ4lYnDYNh++cOf3LwYizRAhSt5vfCO7Fq59kZHBMl2t4xxlJ8flwiptJw4YGjYfV1WDQnDnsYdLQMKkXjZwYhlLPP49OceWVPI+HHiIN98orwaU77wSDvvUt1pvTqZuvvP/9rEkpf/Tww+g+VVX8TpwHfX3YXPv348i1WolsXLkSoq+7G2ySIAmpFbthA1h3+ulkPIRHKIo4HNhcFgv6iThl33qLcf3yl/zevJa3bwf7Sko491BLN9XXExV76JAuPbF8OXtnomy0QIDjHzgA3kyZAvkXLcVaSrhs3oxu29TEvZg/H303PADF42Fv6OzkugeyPwIBnbLsdIIvkybhfIoUdCM2sc3Gv8PHbC4ZIRke6ek8w5QU5pjPx9jCswoTJdK8TKKd9+wBm0T3q6gAfy6/nL1m8WL2zRjGksSmBMgY3MaGLoZhBC0Wy6NKqS9ZLJZKRR76VUqp1w3DkIduUUo5lVLrBjpU2P9dUb73R8WDv1Qp9Tel1PVKqc2GYRyO8xLGlHR0AFqlpdGNvOZmNpn8fBTYcEVICrGaU296e3Vqszn82e2mPkVqKpuKfF+i+2w2AHDhQrzFQwF/h0N3vMrIYPMJB9GuLt0xevr0odXV8HgATEk76uri9+JhT5Rn2edDKRByrbycazgVdXQ8Hu6n18vmJU0hwj14WVncb4miGopynJaGMebxcK0Oh94AE0nGDibSZEdqZSrFZilE5yAK8v9KEouGJ4GAUr/6FQbWbbfhrd68GQP3zDNJb/7v/wYLfvYz1vSJExjEeXlEQkijHocDw762Vql77qHhiYjPhyJ7111EGX34wxw3WnTz1q1K3X23joa89daBjWKHQ0fC7NsH+Xj4MErsD38IAWr+rccDNkr6sqTMC2kY3i05JUWnLEsDmRkzSAe+7LLEdH0XMYzQ2mEDkYd9fRgaGzfy1+Xiu
VxwASnb55wzPIwM77JsLupvFolElOjEeFMlR6o+oijG5gjDgwe1YjxxIsrwddfxd8EC9qeUlKE/1yQWDV/272dOL1vGvGts5DlMnIh+8thjGHZXXtnfcRYM8v2UFAy1e+7BADrnHKJ6hED0etnrDxxgvVRWcj63m0hBIQozM/mbk8P3AwG+O2MGWDSUeSFRk4cO4cj9+McxqqP91jC0s0WINGmSphQ4JfOys1PXaU5LwygWJ3SijEtzbeVgUNdWDtc1UlL4bl0dz/DwYcaenq47Ks+cObCz02plP8jICMVAj4fXcAjFnBxd5iLWCKNEi2GwrwlZKPdLGtAUFXHPVq1Cz505k/mWkTGwEyn0HEksGq5s3ozOsmYNz+W3v8UuuPlm7JE77wQT7rqL9+128KmvD1LO4WDNTJlCLdaXXmKN3nKLJhA9HvSVv/4VnWfBAgjJsjLq0Le1QSZWV/P9ri50r0OHmNNXXQV2RQs8kZJRUqPxs58lMnvCBIjEj30s1BFjt5O+3N3NWJcsGRznvF5wdvt27FCpR7h4cWhd/uFKMAhJuXcv+FhVBYkViTwNBPieRBx2dYFRixcTvX7mmdFJV+lYL5Hqbjc2tFlXdbuxZWVfyM1lzQ7FZiwuZvzt7aHNq0TM2SaC/S6XLmE1YQLn7egYehBONJFSPHv24LzbvZv56HaHNra66ird4KqykvmWlRWfzpvEpsTIuCYR35OHlVJfUTz8rUqpKSq0eOURpdTFSqk9hmG0DvNcrym69txgsVialVKzlVI/HuYxx4T09gImBQXR08tOnkThLS4GBMMVROl4XFyslTSXC+DMzuZ9EY9H146YMUPXCzt8GKU0GGRDLC8fuLGLiMvFhurzAXxlZdGVe5sNsDV3A4skEtEo3nWlUABdLhSs6urhg6eIeJI6O/l/SQkbyGh7q4NBHVkYCPCMc3N5fgMBdW6ujuCLhcjIyMCjJJtZezv3eurUkWsYIx4+ma9KMVfKyxn7MMiOJBbFKX/9KwrpLbewnv75T8jktWtRKL7wBebEL3/JXKyvxwNeWIgyJkTcyZNE4rndpDivXq3PEQigQH/nO8zzu+/GSx9Jenro/vyPf4A/996LkhzNeDJ3YK6rI/Jo1y4UzG98gzTr8DUhKf0pKShJUlhcKU2ApaTof7/zDpGTL73E+VavhsQ4/fTER7aYu5mmpupUYbN0dTGWjRuJJvD5wK3LL4c4XLFi+ClDQhwKKThYl2WlQhutyP2LVYSIjDeiUaSlJZQw3L0bg0ApnvnixUp9+tMYITU1KOby3GMhDiNIEovilIYGntvMmexrhw8z/6dORXf461+Z11dfHdkB2dKimyL98Ifs65deSgkD0YECAfQfaUonhGBtLd/3+ThfRQUE4okTvKxW3i8pYX8cbH60tJBquGsXx7niCsjMcKyQovoSYShRj0rpDAOpY5iZyRxuaMDxFwyCw9Om6RS8wdboUEVwtbtbN6eSxmxm8Xgg52tr+evzoVvMns1znDpVG8qxrCkxpNPTQwlFr5dzmiOzh3LNFktofcTRbChns0EUmqMMpU54ZqaO6pk5E+d6SYkuMSPEYZySxKI45ehRSL2FC5nDP/gB8+zWW5mHd97JM7zrLvSlnh4IRJeLenpS0qmsDL1n3z5I4UsuYW0oxbp6/XVqtXZ2KnXttZQaSUkhYu7kSXSMKVNY62+9RYZBMIhOtGDBwIEULS3o9Q4HTVMef5x1/F//xXnC18CxYziNrVZ0HCEuo0lXF8Th7t06s+yCC5jHgleJcKwaBhi8Zw/3vKQkchkJr5c9f/NmmqPY7ayh5cv5/hlnDN22kXI4mZk8x+5uHbTR0qLr5ZaUoGvGUqNQ6uw2NkIGRrvPUvc6PZ2519oKcVlezp4lwTVSnmMoYrdDrgppuGcPZKRS4MzcudQnnzULLKqs1HXzhThMkAMmiU3DlHFPIhqGsdNisRxQhKHOUOSvP2H6yiNKqc8qpb5rsVhuDs9Vt1gspYZhtA/xXIbFYrlfKfU/ijbfLqXUY8O/ilMrLhegkJ0dvWFJQ4MOK1+0qD8ou1y6tqBEMXq9bB7p6aFAK4a2UgBEWhpAJnUhiosBq/x8gG0gsJAC4x6PTpWN5A2LJX1Z6u10dXENaWmMRSIcS0vZUBPh2QoGAc/2dv5dVMTYRrs2YHjKsnRPHmq9C7Ny3NcXe7piVhakbl4ec+3oUV0kPRE1NyRtrKeHuaCUnu8FBbF1+Y4mSSyKT/71LzqDrl2LIfPQQ8yfdeuYB7feylz49a95//Bh0ntKSogklIiZt99W6vOfZ/1s2BDaFdDrpXj4I4/w/o9+xBoOF8MgBfonP2Eu33CDUtdfzxyMZkRJZ9C6Oo7/r38xb7/4RciDtLTQ+oV+P/ji8zEHJaInUrdktxsS4C9/AX8LCogiWr+euStRMYmKaJFUQEmlTU8PxfrmZp7Vxo2QmsEgJOvHPgZxuGTJ8KOPwlOWrVbGEctxU1J0vcR4xxMrkdjXhyFjTkuWhg2pqRiCH/0oxGFNTWhtJ0m9HCZx+L+SxKL4RJqVlJYyr48e1el3YgQHgzgqIhXk7+3V6ba/+x3fvfJKSEJJVQsGwYgdO1j/s2Yxz7u7dRaHNHiTmtBuN2OSetKDEYhdXThA/vUv9rZLL8WwzshgXUltPiEOJcJXuqzLvpuVFVoqoLUV489mY65WVUF0ilEXjhfDEYdD42RmZn/HsNPJs6qtRXcMBLgvNTVg/JQpeu1LF2ePh/fiwcxwQtHc6VmyNVJTBy/3II1dhKxNhO4RLh4P98YcZdjczGeSgbN6tY4wrKoKjfKW60xE3bgkFsUnNhvO1PJyiP9f/hL99YtfZP78z//gcLjzTp5hVxcEot+PsyAnB9LQ56OOdFcXesP8+aRCi7PtgQeIbiwsVOrHP9ZNA7dtw2ZavJjjNzXR5bm5GTyT1F0h6MNFgjZOnKD0wxNPcL6bb0ZPMweVKMX827YNJ0BpKWRntMhGiTZ+5x0djDJ3rq4laLFwjxJV97Clhb29qwvcX7UqlHRzOhnLW2/x1+3m/q9YAXG4bNnw1nl6uq7D++674E1BgXY0xWsvpqVxv9rawNqBiEC5pxMnQiy3telMP2k8GYkc9fnAIklJ3r1b2/9KsddJM6vZs7mvovtJLVkhgxMduZ3EpuHLuCcR35OHlVLfVuSvP2cYRrd8YBjGZovF8lOl1JeUUnMtFsuzSimbgnG+WCm1Wyn18RjO9YBS6puKYpyPGIbRk4gLOFXi8wHy0rQj0iKtrweoy8oi1yX0+zVZKFGMElknKblyXL+fYwUCbESBAEpyby+bSlGRTvWorIwOGj4fm6x0kSouBsAifb+zkw0wNZVzRvMCud1sEjabbsRSWooy29rKeaZP77/5xSOGwbna2rgn+fncp9EsUquUrt9mTlnOyYkvqik1ld/29XHMoXSIDJeiIjal1laeg9sdPfpgIDEMxiDEoYTj5+aySUlDlxGQJBbFII2NSv3xjygP69ZRi0cpogtbW4nSys6mOUpJCVEUmzahOF1+Od/t
6iJC78c/xsv7yCOhToumJqW+/GUU6iuuIF06kkLX0oKn/6238K7fey9KmhB1kcTvR+l9+GGlXnuNsd50E+PPzNSdjEX1kPRlq5Wos2jp8o2NHPOJJ/j+3LlEErz//dowHmrpgKGIGMRCupmbCdTVcc83bUIBVAri4zOfgTicOzcxyt1QU5aHKikpHEsiEuORaKnNfj9pyObmJ4cP6+csivGSJaRUzZ2rxyPfEdIw3mjJIUgSi2IQjwcDLSuL9X/8OGt3xgye96OP8p2rrw5tLCDi8+nGHS+8wHfWr4cMrKxkfre24gCpr8dptnQpuFZYqAvZT5gAPh08yPdlPKmpzJVoeo5S7L0bN4JFSkEUnXcev+3sZD+VaGelOE9urjbSIuGi16vJAI8HzJozBwyWtSHkWSJEsle8Xvb88nKtS/T2cl9qa3WtvsJConzmzo2uw8q9EzIxEBhaVHM0MUcgKhVKKIaXf4hEKGZl8T2pnTscx0swyL0wE4aiYyvF/Zs/H4fbvHlgt9XKs/T79XGys3W00QhIEotiEJ8P56HVSlO4Rx5hH775Zgjfu+4Cn+64A2xobyfCTymcFsXFYNmxYxB/GRk66m/OHP7vdkNAbtwIDv3gB7pR3LvvotfMn898+ec/iazLziZFOiuLOTt1amTM8PvR1R58EB3G7da6V6SIt54eGrPYbNiYixdHXpceDzrI9u1gRE4OhF5NDeMJBpnDiSKcOjs5X2sr5zrzTJwTFgtjfvtt7svOnVxzYSGYu3IlwTaJWEtOp25UFQiwX+Tm6gjF4TptpQlPV5feBwaS1FRwVhqNFRVhI7e1MTfb2kLrGO7fr+0vyWK89FLuj3Svdjp1cIeMKTt7ZBwsESSJTcOQ/2skYt57/w4RwzC+bLFY3lFK3aqU+rpSyqqUalJKvamU+m0sJzIMo8FisbyklFqjlLp/eMM+tRIIoBgqRVREJDA6ehSFt7KSDSUcmA0jtAufGF2trQC6uehqIMBGKKk6LS0oP6mpKDdKAWSFhdEjIv1+wNvhYIMtLAxtRBB+fSdPAnR5eVxjOKgbBlFEnZ26m2phIYq81E9zu/l/pN/HI1Kk3etlY5oyJT7CLV6JlLKclxd/bQmzZGTolG9Jg4xVsrOZS04nz8ft5jlGq4MkIqlPQhxKI4i8PBTp/PxRSQ9PYtEQxeUihSYrS6lPfYrowd5eorXcbt6TGkCVlSgkL73EOvzgB5m3x45Ru+eppzDYf/WrUCXolVeoFxgMklp48cX9xxEMooD/6lf8/0tfggSUCLRoxnFbGwToc88xlo98hAglSSmR+ScRhlLXJjsbEjtS99AtW4g6fOUVfrNmDfXxpDabeUyJKqPg9erjSurPgQMYFxs3YkwohfJ3221EjEZrQhOPiGEvRN1wjHuzmOsjDqfRiqQx7dqFcbVzJ9FYovQWF0MWXnqprsFUWMg5hcSUyCWrVZMGo1APLYlFQxTDwOgJBJQ67TSMNodDR7M98gjYdNVV6DSRft/YSJrf9u3oSuvXaz3l2DH2sG3bmDfnnYdBKoRgZyf7XVERf/fsYb5Mngz2ud0DE4gejy4t4HAwH885B6zp6uI70kmzsBDMzcgYeE309KCfiS43YQLXNWGCJuIkSjgR0YdSa9rtZo2UlrLnd3ay9mprIWmV4jNpuGVuRjCQmJswCeb4/YkhQAWzJNUwnFA0RyjKvZISMA5HbJkbnZ2hhOGBA7osS04OuvS11/Ks5s8HnyTCXKL2ZUw5ObFHeccpSSyKQV54gbXwoQ9Brm3bphtIfO97PPMvfQmsammBqEtNhagrLsah9frrYMLUqZCPdjt4VliIXXPbbUSIXXUV/5Y5IPNq1izm6C9+wViWL4ewa23lXFOnRtbv7XYcsPffD2ZecolSt98OeRlJ6upI2U5NpexLVVX/73R0gKt79jCPq6txOs+ZoyOBlYo/ACJcenvZDxobwclly4jG7OxET33rLXQAwwB/LrsM4nDu3MSlTnd3g3c2G9hVWsq9kfRvp5N73dHBe8Mp/1RSAu62tg6tJn1KCuOpq6MG9uHDzNFDh8AX0SXnzweLamqYu5WVOutNxq+U7kItjoxRliQ2DUMs/TtRJ2UwsVgs/1BKLVZKTY6xNfeYudliGDmdKKqRSKxDh1Aiq6ujR5t0dgIEZWX6GG1tHLe8XNfJCAYBHFGUpYlGRQWe/o4OwLK4OLJSGAjoVFmlMNTz86ODncvFRunz6fTl8ON1dfHy+QCxCRN0OHdjI969jAw23ni7iZrFbmfDd7sBWKl3NFoinm8pVpuezjMbiejHnh7ucSSyJJZjeL3aU9bzns8mL093ZJRU0p4e3Y3bauV5FRTw3RFSkEeeAhiCjFcsMgwUzd27aWxSV4dSdsklrNcbbwRbfv97sGfnTqJrpk3DM5+aCrn1yU+SPnL77XjlBaN8Pmoa/uUvRDl+//u6iLhZjhxBKd+7FyXwv/8brBDCJ9y4lO6oDz5I6pDXS3TgDTfoiGtJSRZsMndflvRlM5Y6nTQ9eOghnDbS8VUICzF2hWBLxHyWCMlAQNfg27MHEmLTJggPq5VaSBddBJkZzbETjwipJpEwZnIt0WJOjRkKcdfTo8lCSU2Wej0ZGSjES5fqKENz9JPUbzRHHJrrG44QcZjEomHKwYPoOjU1PKOWFtZecTE1EJuaIAWnT4/8+6YmsKqujtqHq1ahf6Smsle1teGMzcrCUDaXUujuZu/KzETnkIYq0oXb6YxMIEqDDmlo1NMDVq5ezdglHTkzM3Lh/Egi0ZINDRwvNRXDddIk3RTEHGmXCIPd5+Oa5ToLC8FYiTiUtVdVxfXNnZu4bBCfT2NgIiO7RQIBTSaaI6yFUAwEeIbStC5cXC7uw759upahOO1TUiA2hCycPz+UABByRXBeKc4pNQ5HqHldEouGKTt3sqbPPps5ev/96CbXXENjlM2byQJYs4Z9+sknWedXXIHO29aGbrV1K7/79KfBpaIiCObnnyfq0O+n1vT69frchw+jT1VUaPJ+wgQIzNJScCEtDQIxXDcKBsme+MEPwM8zzyTScfnyyNfp90M8HT2qU7bNdRUNA/3snXfAzpQU5vjy5egi0uncMJjPibBjnE50QTnf3Llg8dat3PdDh/jelCnc25Ur0UkTWU6mtRX72O3WNeMrKiI7OsQulqZW+fnxO0Q8HuZTTk5kG9zjAY8lwvDdd9njJBBlxgy4hPnzeZYLFmhC0OvVxKHsHxkZOlV5hLLCxgQWhcswsGlMSpJEjFEsFss0RbHN7xuG8bUYfz5mbnZzM6RdVVX/2j6GAVicPAkoSAHecJEIvoICTb51denUZCHeDANQ7u7W0WKZmRxXCMWeHt1MxCxCEknjhNzcwYmpjg6OmZrK+M0eGgnbttk4Xk4Om6REM3Z3s1EK+VhdPXxly+lkU3U4dHpOtI5cIyFuN+f2+XRdi+zskY3KCwZ5phZL/F0IDYPnFAhwv+T52Gy6OYFISgrnEeJwFKJ8TvkGNZ6x6NlnUX6vvRaceOMNFLKaGqX+4z9Yg7/5DUS
NKHAzZ+p03j17UKqbmoggvPJKfezGRop279tHdOAXvtDfCeD1KvWHP9AAJS+PdOe1a0PTX1NTdZdkId2eeYYaQjYbSv5nPqOVyEhzzu0Gu5TqX3vz+HGIw7/9DUxcuJCow/e/n+8Fg7q5iYxnuPPaTB76/dRle+klIh86O1HmzjmHe3HBBYkx1s0i9zdRKcuxnFfSLs3i9WKYC1m4a5eu12OxMOeELFy8mMiH8ChQuR6ZK3Jd8hoFSWLRMES6Qk6ejAOhoUE3TnviCaII160LrbFqltZWpb79bfSliy9mnrS2smdNnYru09rKnnv66aGRjD097GkSQa8Uv6mq0kayNDbxeHQNQ6cTsuGVV8CimTOJhJ0zh/091mgOjwencmOjzpCYNIlxCA6GlzsYLhb5/YzdbudYfX3cw0OHtO4wZQrXNGdOYhy5kcR8bRKtOBLrVs7j82nnSUqKjgTPy+P+myMM6+v5TCnm47x5GOfz5mmS2SxCGsoxldL1DRNZr3IASWLRMOTkSTBn2jTw5t57IWc++1lKurz4IjWR160Dp556CpvoiiuYPzYbztTaWt77+MchxVJSWEP33EMk3aRJSv3nf6JzyZw4dgw9y+3WwQ6rVil1/vn8u6EBvWTq1NB91DDQIb71Lebs7NlKffObkJzRMMJmQ+fr6dFRauY60O++S+ShZJGddhq4mpOjM6n8ft1sY7jr1eNh7IcOcT1ZWdiR27Zx3UpxXStXUjNy4sThnS9cHA6dshwM6nJe0mhtMBE9U8pwhTuqhyo2G3pgSQl/pYbhnj3MKcGt8nL93GbMYD6JE72tTXe393p1lolS2lmSnT0qutEpx6JwGSY2jUlJkohDFIvFslAptUxRZHOBUmqWYRhNMR5mTNzszk5dFDWctDMMjO+WFjaySNE7SgG6LS26q61SmlTMz9fGp2FgLNfX8xspzCq1xk6e5HelpaFdoc2pqcGgTgEcyGMRCKCE9fQwBknRlg5lkjIkadBSe0gpFLuGBhT67GzGN9wUYwkP7+1l0y0r476MAsGlgkHt+RHFX7w+o6BIKqW4p729uklLPBIM6s6MFgtzRSI3fD7mxMSJusnEKMop26DGOxbt3UvjkhUrUFA3bMAoet/78Jrv20cKzVlnodRu3YpCvXYtz/j11yEQLRbqlJ19tj72Cy+gwFosRBVecEH/dbxzp1Lf/S7r/ZJLIBkLC7WBJ00uZGsMBlGS778f0nLhQsa5bFn0axT8cjp1qoZEzr75JhGSr78OLlx8MY1JRJE2NxYRo3a4c9swUOjsdlJxXn2Vl90OJqxeTcTheeclvmOodEoOT1kepZTe/x2D389etHu3jjSUwvNKgc/mCMOamv64ZSYKzWSoRJ6OInFoliQWxSl9feBLXh4YVFcHXkybhsPg4EEwYtGi0N9JRPKBAzRp6uujVuIZZ+iuu/n5zDGnEzJuxgxd8kUp1l5jo3Z4FhdDBmZk8JnNpiOQJV1PHLKvvUbU4pQpOEoilZoZithsOmXZMBjf5MmheoqQXoki2CQyu7ub629p4a/UtxYCZfbs0S/zMhpkolK61M7evfpVV6eJkcJC5qNEGM6bF7mRj0RTejy69q65AUx6+uhh7HuSxKI4xeHAqZiWptSFF0L45eaSavzYY+DRFVeAM3V1Sj39NM6OK65gnXR2oss0NNC45PLL2d+k0eHdd7P/rV7NZzU12p5qbCSa+fBh9rFJk0ilrqhA1z5xAltv6tTQNbF1K7rUli3YgZ/9LE7ggQIUjhyBnBOHpThV2tqIOty7V5dyWL4cHBD9x+PhpZQmqoYjfj/E4b59kKhdXazLzk7WzcKF6JdnntnfVh6uGAbnkSAaqxUdpLIyvtRkCbgRHC0oGLozSWo/7t5NMy6pY2ixMLdqajRpWFPTP1JRAjuysnSGnzTlyshgfx2oFNUIyZghEROETWNS/q/URBwNWa+U+oZS6phS6rrxOgHsdsA6Pz9y1N/evXw+Y0b0uleBAApsSoom/lwugCgrK7TD09GjGGwpKZA9c+YAKFJDqK9Pt4oXcTh0TTup4TMYGDqdOoKwspJr8/u5lq4u/p2ezmeSCivS3s5YDIMxmhvBxCM+H0p5d7duLFNSMjokl6QsS90bCRkfpQK1ISJdJKUTpDlVYSji9TIPOjsxNGS+VVWhVKem8mxdLjb+wsL4PXDjTMYtFnV2EmFYXQ159tRT/Hv1aroO7tlD7cKzzsJQ3rkTRe6CCzRp+JnPMAf+9jcdJe3xQEw+9hjf/5//4bjmOWe30+HwqafAgZ//HKM/GGSOSgSiOR15yxadpjh1KgXNV60aeD0FApoAkPTlvj7O+9BDKPIlJSjcV10V6jwxk22JSK8z4/ZdlgAAIABJREFUDDzqL73Ea/Nm7lVhIaTh2rUoySOBD5FSlkfSMA+Xzs7QtOSdO3W0V04OCvFNNxHhsHQpxkw07BAiVJ6PUjqKMlEdlcepjFss8vu1bjJ3LusyPR1i7p//hEC84AJNIEr9ZJuN1+HDlDVISQFvTj9d60VOJ4ZYZiZ4lJ8fGlUiNbe6u1mL1dXoRW1t2hBMTQU7MjP5rLmZcdXXo9985jM4MmLd74JBjtXQwLkkY2PSpFDSztwUKhHRh8Ggjm45cgSHjNUK9syaxTOYMePU6CpK6bFICrLXqzFruOu7r49IHnNastSqTE0lTf7CC9nPlixhf5PuyJFKakh9Q4lUt1gYu/zm30AHiiTjFouCQbqpezzoRb//Pc/w1lt5/5lnKONy1VWQXhs2gAHr16Pj1NfjNO3sBIsuvJD3enrAtT/+kT3vs59ljc2bp+dVc7NSf/oTeDZtGnrBmWcy5202bKPs7NBu5wcOQEpu2gR+fe5zSl1/PRgyUDPMrVsZV0WF1jtqayEPpT7+woWQh+YSVIEAmCgNkYYbDBEMgu/PPYfN29io6/gtXUpGyIoVkYn74YrYh83Nujv71Knck+Hoe1Yr483K4rkLkZeXF3qvXC7wR0jD3bt153ZJS169mojn888Hm6LpbIbB8Xw+5p7ollJrdfJkfc0uF3MuI+PfEp/GLTYNJslIxNGVU3qzpU5gZqbuMCUSDAImHR0oMrL4I0lrK1F2FRU6ZLmlBQCsqNBRPDt2oDAWFBCOLhtMMAhoOxya1JPx2WwATno67w+lzkV7O+dPS2PcUqi8p0enQEvKslncbrxPfX18NnXq8BRY6VLd2cn/J0xgIxxpo1maj0i9idFKWR6q2O3MkaHU63C7dWMUIUKlrpOkRoenNUlXbakLUlQ0vCLDQ5Txvg2OOhb5fHitW1spCv7CCzyvq69W6hvfoCnBXXdRpPqll1Duli4lOs4wUFrvvhtj/c9/1iklDQ146w8eJKLvE5/QBLZg0csvEzHU1YUifuONmmCUVFdzJ83du2nosmcPGHXttSi9BQUDrylJK7FYmKeNjRCHf/8763PJEsa4dm3oWjBHH0p9wOEoyW1tujHK1q0ct7yc8150EeTpSEbZSBMYpUYnZdntZr6YuyVL0zCrFYJiyRIIoUWLdFryQGImDu
VapH6kkMxjhDxMYlGsJzQgEDs6wJj2dubsrFmk2b3zDlEyK1eynm02XefXagVrnn4aHeWLX+R3Ugi/oYH5OHGibhJQVsZ8CwYxordsYb+SlGEz8WO1ap0lLY1jPv00WJSfD5mwcmXs69ft1inLPh/nkMYt5mOZyX+JahvOPHe5dKmA+nqOVVwMkTF3LsTFWNBTwsXc8CkWDPP5cJ4fOKBJw+PH9edTpoSmJc+YwXOWsjPS6MRMEEpGjURLKqWb2mRkjFhNsXgkiUVxyKuvsm+tXcueXV9PVOGBA5RdWb2a9OMDB6hpWFlJBHJGBlj13e8yL77yFYif9nZ0mH/8g6i/Zct09sacOZoc279fqZ/9DB19zRo6eJtLUzU16eaPViv48aMf0YguN5cyMuvWQTRF6lgv0t0Nrtrt7L/Tp4O/O3aAr2IjLl7cP9jA7QYrxa4Zzlx3uyEON2zg2iX6WrB++fKRi37u64Osa29nHUsD0ZHITpNMmN5e9qNjx3Q9wyNHdAZFdbWOLly0iKjnzExwqKVFZ+yZxdyYU2pSCgHrdIJJRUXYwSkpOmpRnB5KjVqU9HjHonEhSRJxdOWU3WyfDyCxWPrXtAgEAPSuLpSa6urox+nu1vULc3P5bXMzQFJZyXGdTlLmjh9n8zEXzA0GATWXS9dj9HhQ0j0eXYh8KEAu3aV7e3WjFZtNd4cqKgKgw4lBwwAgxRM+aVJoNFCsEgxijMjmUFQEcI60YheespyaqlOWx5KnxzA0oVtQ0F8Rdzo1cSipCjk5usahRKG6XGyM2dmRSUKnUzfKycjg2Y9E05j3ZAzd4bhk1LHoj38khfeWW1DgJAXwnnuokXjbbfx/0yYUnjPOQLFzu4m6eeIJFNYf/EAb588/Tz2ytDQIyNNO0xGAUp/lJz/hvLNmUS9IUv+sVk12SUTZ0aPUHtqyBeXpuut05GG4R9csEqUk2LNrFx1dN29mbB/4AOThggX9fyuRL0oNL/qwsZF79/zzGCSGAUlw8cW8ampGjvQazZTlYJDnZI4wPHhQRwhWV0MYSlrywoWh+8lAjVYiEYeSpmwmDQ1DX+cYwNpTP4LhyahjUX09xtTs2cwbhwMyZ+dOsGLePAxaqTOVmqodWC+8gKFfXk5DpTlzdEflI0fAicWLmXM9PehJYtTV1uqU6eXLcbqKk0wizNLTeU86gW7dyudr10ImxFrvsKsLPUkacphTlsNFsEgioePVYfr6iJjatUuvzcJCvR4nTx4zBPygEk4mmglfw0CPNHdLPnRI43lxsU5HXrAA0nSgchF2uy7TohTzUjI55JlkZXGMsabnvSdjb0SxyahjkUTELV2KzfSvf+Hk7O0lc+LMMyEH9+1jf588mVTj1FTw4b77wJxPfQpdxe2GPPzTn5g7N97I++3tEPbSOG7DBhycmZkQlKefrueTpNnm5WEfdXWRufHAA6zb669Hp8nLYzwDlSs6dIj6hunp4O3x41xLIIAtunw5uln4XPb7mffBIL+VQIJYxeEAQ59/nlIyLhc4fsEFOFSXLh25jsCSstzUxPNMSdEpy4kmKw0DjH/33dBahuZmpEuWsDeJM3WgmtcdHexflZXov0IaSnCHlMgyN+f0+/mdOMI6O/lciEiJbpdglxGOnB7vWDQuJEkijq6ckpsdDEIg+nz9o+38fhTn3l6UnYE6cDqdgFReHqAgZJzXqwnEEyc0IVlTg1EvEghAIHo8kACZmTriTGo4hHcgHGgsx4/rTscWC9ci5FFRUWQl1eHgXrhcfG/SpPgVZcPgOtvaOHd+vo7OHEmRTlduN/8/lSnLQxWpg5SSwvwxE4eyoUjTnIEiFu12rjsvLzJBKMaapJNmZfGcR0BJGO8b1Khi0WuvodR+4AP8/8QJPOmPPgrZdsstpJY+/zyG+MqVkIgdHRCLb7+NZ/7mm4nw8XohE598EqXoe99j/ni9umjzM8/QdMXvp4vztdeGEnRS/yo1lTX8hz9AEOTk8N1LLtERigOlyfv9usbXpk2kWTc2ggVXX63URz8aWVmTelbBoE6bi0WZku6FEnF44ADvzZlDOtMll/DvkTQ0pRPxSHZZbm0NjTDcvTtUMZZ0ZCEOh1K7SKKspE6lEIeiDpnrGw6U4jxGiMQkFsUgnZ1EwFRUQGx1daG/vPsu60g6b0o2hJTJkGZMO3dCCF10EcavYRBJdOQIx1y0iP2tu5vf5eToiJCeHqJwzjordL92uTh+RgaY8M9/QmZarUQWrV0bW3S9OHcbGlgraWng5sSJkcuKmDsVx5vCa7Ppjsp1dVxTbi4YdNppkLRjkPQakkh0Zmcn13fokL5WKZGQmcm1mrsll5XFds0+H2SPz6cdYampOhrUvGcopYneRKRcJ0jG6RP+XxlVLOroQP8pL2d9b9hAg6TiYiL+Fi1S6mtfY8975RWw44MfZF78/veQhTNmQCouX848uPdejlldTbOTigpspcpKCL/6ejrOb9/O8b7yldA9s72dPTc/H+y77z6lfv1r1vPVV0NW+nzsjVOnRnfU+3wQovX1moRvbWWMYhtG2qsls0rKCUhtvViku5tzb95MJGZ3N2uopoZMlzVrRjb62efDNm5u5joyM7F5y8oSd96+PshYM2nY3s5nqak4LiTKcOZMbRPn5Q2NwPT5SHF3OLD3JRpbiMNo9qbXq5v0paWx9xUVhTpPgkF4ANHDRiiaerxj0biQJIk4unJK0nak9mB4p2KfD4XYbgdozDUowsXnAxDT0nTtqPZ2AEY8WwcP8h2/H2+z2bvk96PQer38PhDgt1Yrm1UsHXXb29mYHA5+K1FCEyZE9/JKIeu2Nl33KN56FxJZ19qqOxlWVIxsEfBIKcuj2OVq2GL2yHk8OpQ9P18Th0O9jp4e7nthYfSNR5rp2Gy6Y1lRUUI3qvG+QY0aFtXXk24zdy64sHcvkXGvv46C+rGPUVPnuef47nnnQQrV1kLAtbZSJ/G881CET5yg/s+RI3Qf/NSnwBefD4WztZW053ffRbG+4w6cBWYRQ8xuxxv/9NNg0Uc/iqJstWqjfiDj3e0GQ594gs6JHg8e/euug8iLNqfN6YKxkG6GgbIo0VD19by/eDGNadaswagYaUwIT1lOVG1Ah4PrM3dLlno9qakY5hJhuGQJkRXxnDMQ0B1M5fdDIQ7DZYwQiUksGqK43RiXGRkYdXV17EUnTpA9MWuWTukzr3ubjaigEydYY3PmYAD7fOBWSwtG28KFul7dhAkcp7ERssDjwZgLX5+ypxsGkTIvvsjaOussnC5S7mUo4nLplGW/X0cKDdR4TLBTKfbHWIzc9nZNprW0cKzcXPB27lyudxRKi4yIeL2Qhea0ZKmdLRk98+fzzBcsAIviwV2/XzdGkUhQj4f7WFQU+Zjmuo1jjFBMYtEQxeNR6uGHeY7z5vHvM87QTtFZs5S68072wDffJIrvAx/gdz/8IY6Q972P30yfzlq/7TZw7NxzydBQivVZVMSa3LiRLIu2NnSjj30stDxQaytrOjsbHeOee3CyXHqpUrffDhY1N+saidGwo
quL30vjstxciNHTToPYGoh4lKhbaZwy1H21tZVr27yZiGApi1VdjX27Zg14NJJrw27XKcuGwX2XlOXhiDSAEbJw9272LqFvpkzhGiXKcM6c/oETgQA2kZR9ilSaR2rqO526y3tnJ9cxffrQ7SeXSzcplYjSsrL+xGMgwHjEeZWRkVByd7xj0biQJIk4ujLqN7ulhcVsrj2oFOCwYwdgsWjRwOm8Uog7EEDxTk3Vac0FBSjIJ09qo3vSJBQqc3c/iUDMz9fd7yQFeaig7vWyOTQ2ApBVVYx7sEgzKS7s9QJk1dXxG9m9vbomZFYW5GGiu5maJRDQYeRjOWU5kgSD3K+eHp0WJulalZXxN5sxDIy6QCC6km0eg0Q8GobuEpYAkmWM3/1BZVSwyG6nW7JSeNC3baNgdX29Uj/9KR70O+4gnfnECVJMamrwul93HcrmffcRQVNcTF2d73+f97/zHeoU+v2sR4uFWj1//COff/GLKN3h68TnY1xPPsn3vV6U5BtuwPDv69Mp0dGUXb+fMT/8MGRldjYe7muvxXiOJlLTylxna7B1HAhQ92jjRiIdpcnQihWkN55/PjgUKwEQqyQ6ZVkUYyELd+7E8y1G8ZQpoRGGCxYML9ranKYs9cWkC+tQnkM0ESIxabjHLaOCRdIZva0NPaCpSafhbd7Mur3mmv57Q0MDdcP6+uhqmpqqMyaktuG55+oGLL297M8+H4aeRJVVVfUvni8G2zvvULe1r4/6ZZdd1r8D5kDS2ck429sZV3k55OFABKSkTwuRPtQaVU1NkIa1tbo5SEUF11dVpTNBxlOTs2CQ/ceclmyuHVZWFpqWPGsWuBEIaByMBXuFAPR4NN5J04H0dPYzt5t7OFgWhRCKEkmqlE67Hs1GVu/JOHniUWVUsMgw0B/q63E6PvIIpPTateg1VVX83bGDLIz583G8dnRADp44Qe3n8nLWuMUCgXjiBBkdt9zC/Nm3T0ce/+Mf6M1ZWczfiy8ObYLZ3Aw2btmi1O9+h023ahWRkIsXs+67urD5Jk6Mvt+9/jpO2dZWTW4tXz5wJHIwyHglwjEra/B5axhc7+bNvI4e5X2JuJaag/PnQ8COlG4k5ayamsBvqQNYWRl7M0mluK6TJ3U68u7d4JGUeioqCq1jWFMTW0CMy4X+GwyCL6mpoQEqSunstuxs3m9vRzeOxaFlt/PKzcU5bBjcl0jPVRwpwSCfp6cn5HmNdywaF5IkEUdXRvVmd3UB5NLgQ8TjIZTd42FzGMxLIhGH5eWAYl8foOnzaQ+7gFheHh4L2WB8Pt0lrLAQcJK01aEqNz6fVvC8XjaImTM53kCGm/xOulRNmRI/4SeFZp1OrqG8fGQ6d4mEpyxnZgLoI1W7I1Hi92vi0G7XdXykvmFuLu9L+ne8G4V0wLVYBp8H5u/b7fxfxjMMw3+8b1AjjkXBIPUIDx6EXNu+HQ+p14sivGYNdQyfeQYFViJ87r+fFJs5c0iBljny5z+jeC9fDpEoHdgdDs7x059itK9dS+OWcFwzDL779NOQf3Y7JNxNN+H4CAT0nJXucuHS3U0K9kMPga1VVdQHWr9+cDwQI0+Iq4HmntdLRMGmTXj0bTZwZ9Uq7tPZZ+vo7QQpXFElGNRNX5SKL2VZaoeZ05L37NH1dYqKNFkoL7OBE68IcShjV0pHG0oqsyiuw+08q9QpIxKTWDSAOBysn+3bcUBOmcJ8KCtjD9qwgXV85ZWsS6+XeSlRxn/5C/vv1VfrqHbDAHMk3XjqVNZ3Wxu/F8dZbi7nyMwMrZMsZTfefpvIw95eSMx16xjfUMTv1ynL0pBj4kSwbDCyXaIPBYsGc8SdOAFpePAgY7VaueYZM7RuKd1BCwrGPnnY1RVKGNbW6hIJ2dmQhZKSPG9edCe7GRsHupeShizRPWYHhhCH4fest5fjxqKniJPK5wstMSGE4ig0sBnjT35QGRUbbetW9vfFiynhkpWl1BVXkEFRWEgk4vbtkIiLFrHnHz4Msej1kokhJZzq6oiStljQmy67jO/s28ecrqvjt6WlzOO0NPQeSSUW0mrDBnSvY8c459e/jnNEylD19bHWIzk3AgHIrr/+lfMVF+OYPeusgRuuKMVY3W7GkZk5MHYZBtcixOHJk7w/bx5kbGGhLi8wezbvj1SZJ68X/G1p0VkwkrIci27U0xNaw3DPHu2cycgAg8ykYXX18PFVSpN1delSUvn50bPbxPatro7tfnZ367IWvb3gXGnpwB28BR9TUjjXMJwg4x2LxoUkScTRlVG72X19KH55ebqLqVIs6B07WKxLlgzuWejtBWiKilBmpMOfRCbm5nL8jg4WvDlVx+NBQbPb8VQXF3O+oSoyDgfHPXaMv8XFAOlQmqB0dDDOYFBHAMQDvG43APr/2fvu8Liqa/s9kkYzqqPeLMmSLFuWm2yDwXYgGAgmlFCcvARCCQkhIZC8Fx4hhCS/FNJIIe2Fl+Q9IAnwQiihkwRTAzFgiqtky91IlqyukTS93d8fy4tzNZoqjcrY2t8330hT7r1z7z3r7LP23muPjOC4S0txLibDSdY01fXK51N6IDO9ZNnrVdl+jDilp6sFBbV9aMwOFFFR1InslyRlLNvx+TCx2Ww4v3l5mDzHcQzJPkFNOhY9+igc00suwRguLsbY+frXQYJ9//sgBfv6oC9WVwdS8Te/QSnwH/6A68SsxcOHoYn4+c9jPPj9CHDccw+IyOJilNycdtrYY/H5EIn/05+wv1WroJPY0ID3vV7siw5VMEbt2iVy//34PS4XiMxPfQrOfbSxGZx9GK4kxG6HduRzz0FjzW7HsZx1FojRD34Q32cZNBeGk2WhSpZjzdgbHoZTvHWrIg37+vAetYn0WobV1YnBVGYY6olDah+yXDnU7wzXaCUem0YicRaL9BvT6eJarUpn7uhRZOBkZcFXMZtFHn4Y+H/RRSobhpmqmzeDxJ87V+TGG1Xn9f5++ERmM+5fBiC6u7GoJaFHMjAQgN/AoEQggIzsp57CmKirA3kYKYNZbw4HFvWdnRifFguOoaws+r1HLOIiLZwOq98P3KX+n8OBsV9Xh+Osq4N/x6Ac9dNmiC7fKHO58Dt271akYXc33ktNhc+q1zEcT9MX/XnVB4lIGno8KnubnUmjZX6y/DA1dXTJaTzH5PNh31NIKM5iURR77z3oJtfWoopheBhlxb/4Be6JH/xAaf+uXIn5f9MmvJ+fL/Ktb6ks53/8A5mDdXXwjT7wAdxTLS1Y5x04gGt+xhm4D6xW+BHUvmdG5J13Ynyw+RwrODwe+F0eDwik4MDeyAj2s2kTxlV6ush55wHPoiU8sMuvz6caBoUad34/tr1pE34rG3csWwbt2lWrQIi1tuKztbVjm6kl0oaHgb39/Th/BQVYY8aSpefx4Dh37lRahuzcbjDgOrLpydKlicug1MthsRGnvrlgejp8zXCyYn4/AnAGQ+Qs1FD77e9XJOvICPYR7VzpMTMtDfP1OOaWZMeipLBZEnFqbUpOtssFYKIjy8FntwPwAwE4v9EcE5cLzlZG
BqIrXi+Ar70dk0ltLQi9Q4cw0OvrFeD19wP4AwEAo96JjmSBACa6gQEsBHp64PBUV8emf+V247czCyCS8G8k83jw261WOHHFxYioTYaTHFyybDSqUsqZGtF3uxVx6HDgNbNZLSaipfGzGYXROD4HWX8cw8OYZOLZjscDMtHhwPUdR/nVDL0yMdukYtGWLRD4PuUU5QjMnQsHddkyRNz/8Q9cuwsugFN73XUg+j73OWQaWq0gIu+5B9f2hz9ECa8ItvnCCyAXBwagZXjDDWMdR01DCfTvfw9caGxEqc/KleozbjewkU1/OMZ9PpAIDzyA35OeDrLzssuAn9GwgGL8kbIPrVb8jo0bUWrp8cApPeccEIdr1uB7zC4RUYvAyQpkxFuy7PViAUKycNs2VVokgkU6ycIVK0BCJJL8DNVRmcRhLDqN/D6/MxGbJiLxhMci6ptarZhXGISjXMrevSDbTCaMdaMRGclmMzJ3KBRvNmOcP/00xuPy5SLXXgtfpLkZAUWTCf5QRQXIu5QULOj37MH3y8vhqwwOYmyUlqrsjX37QFweOoTPbdiAfUQby1yMtbVhEW0wgDSsro69IoLZaeEy5jwelPDu2YPjpDzN/PkYswwQ6+VJuCCcggy3mCwQwLndtUtpGR46pMZleTnKkZlpuGBBYjOVfD7cK9T5YuCFpGG8uM25KSNjfKWRNGZC8h4gtusJxQTNJyc8FkWy4WFUMWRmYjwfPAgC8X//F9fl+9/HGmv3bvg6p50G2ZX77sMY/MY38LnnnsN27HY0UDv9dOCI2Yzgx5NP4lovWgR8a27GeuYDH1D60M3N2N6mTcCSr30Nvg3HMptXahp8N722aXs7JBj27AEm+v0YVx/9aGyNzdxuJUFDzNWb1ws/4vXXoWHLLLaVK1XTvaws4FVLC7ZXVQXibSLriXAWCKhAlM2mEkrKy8OvLwMB4DUzDLdvB4FIQr+4WJGFTU04f4mUxmJQTL+21CemUBKLQTf2KOA8GWxOJ8jT3NzYrrH+OPr6lByX04m1dDSSV5+9zQaH8WhkSvJjUVLYLIk4tTbpJ9vng9NE4WdOCDYbFsIiAOJoYOX3AzBSUgCUDgcA3WqFg7x4Md5jpKu+HoPc4cCk0tYG0F+yJDZQ93hABgwOqgWhwwGgqaqKXnLNzl+dnSpaEg/Q0Xy+0WnehYXYzmRkAno8AG5qXcz0kmWnUxGHLLPOyFDZfPGStS6XcpAnEjV0OLCdzMz4RdxdLtxzLhfu32BB/QiW7BPUpGARy1Zvvx1jZ+5cXJvGRkTQ6+pA/D3/PDDp4ouBUR//OBytH/8YmYa9vRAVf+01OI0/+IHKQO7vx+defBG48//+H3Am2LZuFfnd77CgrKzEds84Y7QTQskAfQfm/n6ULP/lLziOqioc54c/DCyM5f7QE3HBelnd3aoxyltv4bPl5SAozz0X+JyaOnrhJ6IWo5NBHgaXLOtLfvWmacB2Zhhu347FiMeD94uKQBSSNGxqUrpzibSJEofhtjdRInGaGq2ckFjETC0Sh8yus1iUdIrdDjKQeswU+X/5ZcwXV1yB+Z0BO5cLAYeWFozFDRsw773wAsbtvHmKOKysxJjculVlWi9dijmETdeKizG/HTki8sQTGC/MfDz99Oj3qc+H7Mb2diWlQs2vWMkvauYRi/QEvtMJgpUdlf1+nJeGBpAWNTUKi0jS+v2T0qgsbtM04LOeMGxtVb5JTs7okuRFi+LT9IrVqPXsdiuspkYqF74TaXZis2H7E5F/0RuDW5xb9MGiBASoTkgsisV8PgQQBgdxrrdtA748/jhw7LvfxVy6bx+w4aSTRP77v4E9H/wgms8FAvCfHn8cY/Mzn8F90dgI3HvkEXy+uBiB1eXLsW5rbwcpWVcHrPrxjxGgzc5G8PVLXxrtv1utwCyjEfsxmXD8LS0gD7u7VXAxOxtYsWZNbPqdTieejcbR2u5OJ0q4N21CprbTCSw65RT4gCedhGPUNJCbO3cqma1ly6KXTY/H3G5Vskyd7IqK0GvCgYHROoY7dqhM7YwM+Kh60jDezu2xWCCgsg1ZIk6NyWiJKazs8vlUQkgwZnGNXloaH+Hp86ngFytF9PIekYzavfQxY8niPmbJjkVJYTOWRDQYDCtF5CIR+aOmaYen+XASZZN6sgMB1UCEwC+CCWrrVgDCSSfFFgHo6lKdlLu6QED6/fg+9X/278dn6+tVs4uhIZBweXmxZZzYbFi0j4yoZivUJjOZQEJEI6ccDkyMDgf2W10dPxHn9wPk+vpUV62SksQ7yaFKlhkVmokly3a7Ig4J4llZijicKOHJqH1OzsS2NTKCSTMnZ3yZpw4HJkdmX+Tnq8j/li1b5KmnnpJrrrlGampq+JVkn6AmjEUkTPSNKlwuZAwODaHkd3AQDtN3v4vx9MtfgvxzuVDm3NcHAnFoCHo8556LBeG//ztw54tfRIZiSgr28cwzKL1xuZAhdM01Y8fovn0gAt56C47lpz4FfR79AozRV69XkffbtyPr8O9/x9g8/XQ44suWKXIi2j1K4o9RXy7IDh9GtuFzz2E/IiAk1q/Hb168eLRTpF/gcXE3WVnQkUqWBwaw2Nm+XWUZDg7ivYwMOMT65icVFZNHntH5pI6hyPgJuH3pAAAgAElEQVQ6KkfaPq/bRM71ZBOJIfDohMEiZrBbrfBrOD6ysxVZwywXvx+LXrsdxJ/BgMXfiy/i7yuvHF2e198PKYWjR0Eunn46MOLJJ7G/1auxneFhYJnPB79qcBAZbQys9vRg/8XF2PfTT2NRbDSiNPHcc6NnlVGOprMTv4N+TUlJfKVk1GElFqWkYK5kR2VmGlksijjkudIfy+Agfm9GBs7ZZOmMRTK7Hces1zLs78d7RiMyJvVlyYnQDgtn7O7udqvMImp46TVqfT4VTIpUPh7JNE3Jv0yG3iRLnjnfiCgyMdrxnshYFK89/zxIwvx8ZAuuW4fy3K4uBEJbW5EAcuaZwJM77gAhddll0GMdGhK5+WYQbevXIyja1YWAgsEAuZX9+7FGu+Ya+OlvvYXgwMqV2O/Pfw4fRwRZgzfdhPWc3np6QBJmZakg8LvvYu53OoFr8+bBd/P5sO1oUgwsp/V4VDZcWhqwaPNmEJ2U2bJYQEiuWQPfUe/fseHI0BB+T1MT1qiJtqEhVbIsAj+yokJlfbtcwB89aUh9xpQUYJG+LFkv85VoY8KNXkNf34Az3vWQ3a70YXNyRvMFTBLweDBPxBPUcLvhT6am4jqnpIBIjEfvlThFWYgo+DRhLDpOeaiE2kwmET8rIv8rImdqmvbKNB9OomxST3Z7OwZ/VZWKElitcHSZCh5LScTAABxlsxnRqCNH8L1Vq5ARSALR74djy05ybjf2n5uL18MRcCxZ7u/Hd9LSMCHk5GBStNnw/5w5kQEmEACgdXWpkud4xfhZJtTTo0Ss9eVHiTKfT6WVMzV7JpYsk1xhyRIBm81wLJbEli5p2mgB8YlMtNS/yssbP/kbasF0//13y3XXXScvv/yyrFu3jh+dQVdtXBYXFoU
iDPXGcfo//4MF8+mn4zwuWoSod1YWNH1efRXX+tJLgUvXXINz/PDDiNT+9a/IOszKAlnI8uUjR1ACvXkzPnfbbUrPkNbRIXL33SAJcnPheF900dgydZY+Mhr+0ktwrHfuxH43bECX1oIC4Br1PWPJGuKCMi0NZCaJw7178fqSJSAR1q+HYxlqGyzfYJe6RJOH+jJrfRaK349Fjr5bclsbvpOSgoWNvvHJZHY8pOn1DemqJJI4DLZE6SNOJpF4991j8Oi4xiKvV+kb2mzqGlGEPyVltP6lyQTs7ugAIVhZqZolvfACxjQzEGmHDoncdRf29YUvYFF89CjGr82GMVtejtfMZuBHWxvG0dKl8D1EkBnncABXXn0VWZApKSgjXLcOi9FwY4aZdW1t8MFSUlTJcrwlevrsQ6MRcyyJQy52i4oUcUiNNL05HDgOr3dsYG2yjQ0j9IQhCU8R+Lh6wpCVMJN9TMyI0eN8enr0BgD6RifxaMvq982SzkSWPIbaDwNY+sASCcXguehEw6LxWnMzSEQSiE1NuL8PHUKTlIMHMe7Xrwcu3X47iLx//3fgRnOzyFe/itduvBHBzZ07VZbzq68C184/HxUTBgNIuT17UDn2yisIrHq9CKhefjmOQZ+Zq2nYltWK4/T5QB7Sd1mwAASl3Q7/ICsLfl60DECfT5XTpqfj+2++CeJw5068XlyMbMO1azGeg++z3l4EMvv6sE5ctkyRp4kyamx3dioN2LIyBG46O0frGO7dq+ac8nJFGC5bhuOfbJzketLhUJVsXFMmoprN71eJI+npo7OgvV7cJ+np8QeNHQ4lEebx4DzF0uNAb4GACt5wvg+D/YkgEY9HHiqhNkOUTCZuBoMhS9M0+3Qfx3RZdzec3bIy5WQMDAD0TCaAfyzEmN2OxT8ddpcLDlt9PRbSfj8mPJcLk4fVqqLcdjs+U10d2lFmJGJwEECQkYGJwGLBvg4fxutVVdHJwOFhOJVuNyagysr4u4VarThvLHEqK0s8+Lvdo4GeaeXTWQYUbCRVSBwycyEnB9cmN3fyomjMPmXn5IlE2i0W3FuMUo7nmLOz4RyxdKuzE+ckHjsesEhPFpIQobHkk8LMvF4bN4JAXLEC12HuXES+jUYQgK+8gs9+9KMopaE+4sMP4x649VZkATY1gUhkF9U//xkOcFqayH/8B0g+fUlxfz+yGJ95Bvu66irsIzt7bKSSmlU9PdBffPRRfL+uDtkAl1wC54gl+7wfop0rEojNzSAxN25EUMdgQEbmN78JncM5c0Jvg5ktzISbYFe6iMfJbL62NhVF37oVGaBc6JaX4zpecQWely6NXypgIscZTBzqG6NMZtCFDXuoZzZeo9YQicTptGTEI7cb+Ds4qDLERZRuIedpoxH/8zVqJvX0YMFZWIhrYDKpZkWXXz6aQHz3XZF778X8cfPNCCIywyQQwMK8shLjeWgIPoPLhQV4XZ3aVl+fytx94w2MpbVrUYqYm4vxE2pMe72qZNnpxG+ZPx9YEe9ikKVf1PA6eBAL3p4evF9ejkynhobwizeXC7/D7cb5LSmZ3LHPypeWFtX8ZM8edc3z8rAw/9CHVGnyZEgkhDKfD+fB41GkgdGomvPEGuCh7iDnCTaTiJVMZOMJpxPHM1mZoDymjAxFRHu92K/TqbIpxxvcSkYsmqh1dyNQmZEBrKmuBglz4AD8mb17EZw4/3yMha98Bd/73vdwr//f/4n89rfAhR/+ENnM27fDN92/HzhZUQGcob4qu/zu3o0qEKsVkizEvqqq0YEJnw/+ADH3zTeBHxkZyMBeuRL33OuvY79z5+L1SOsYVl2xa/22bdhuayver6wEGbp2LQKqocaB1Qos7uxUiSyxaOPHYy4Xzn93txrvAwPA45074dPZj92xOTkIBH/2syrLMF4SbLzm8SjikLIJ6enAx0SvKVNTEUR3OrEW6u9XvrDRiHuouxv3SjSpMb1lZiofnPqIbLYSqzGT1e9XVQckO6d7XX0i4tu0ZSIaDIYsEfmWiHxMROaIiENE9onInSKySES+HeJrn9Y07Y8Gg+GPIvIpEakWkZ+KyLkiYtU0rfbYtueIyA9E5DwRyRORAwI2+Zea7gcbDIZXRKReRE4Tkf8SkXUi4hWRh0Xky5qmuYKO+WoR+bqI1IjIYRG549jf39Y0LZalwqSc7MFBOGEFBXCAReDQ7tiBQbtyZWzOqMeDSefIEXwvL08JZxcVwTHduxeTS1GRagtPJ9BkwgQZ7CgThNj91GKBc5+RobQMe3owSVZXR06/9vkA7v39+NzcufE7lMPDOF63G78zXn2HaKZpCuxJyDFCNFO6F7JknOQd9aRyc3F99A0mpsK8XtUgZSLXwu/HbzIYYu+ibbfb5fbbb5dHH31UOjo6JDMzU+bPny833XSzbNu2S3784++G+tpxg0XBGYbBhKGeLAx3T+zdi/KbykolvPzII7i3fvxjkFRGIxzZO+6AkPiFFyJz8PBhEIhHjqB051OfAqHf2goycc8eOMhf+AJe54LWZgPB+MgjwIWLLgKByA5zwQSi243F/SOPgFAIBBDlv+oqlM4YDBiz7NxtsUR3SlwuaPi89BIevb1wjtauRfbS2WdHdjKDycPJ6Jrp98NJZlkyFxjU68nOhkOsL0vmPDJVptc31BOHJA+nkoib7kYr4fDo5ptvll27dsl3v3vc4ZEmggVFby98AasV45XBrNzc0eVZfIS6Pg4Hsn1SUlTTj7feUtIJ7JqsaWju9PjjWMTecAO+8/bbOA5qOtfUYMG/fz/2n5OjGnEVF+M73d3Icty8GVh08sloGJWZqbIgg4/VZlNdlgMBbJMly+O5371eYOnevTheBr+qq5Ft2NAQuQkLdalJFo2j2VhMNjysyEI+W614Lz0dx6lvflJWNrXj3+tVxCHHr76j8kT9In2ZObPAY8V8BnknM7Crt2AsysjIlHnz5suNN94se/fukjvuOD6xKFHmdKoGKG1twCyjEfMwtZ97e+ELdXaiGV1pKfSjMzLQaOXNNzEWrrkGlRlbtwK3RkYQFGhowBhZsgT3Z0sLSMfnn0fQ46yzRG65BbjldgMP9Gsmtxtrxe3bgWMGA47h5JMxDtPSgMmvvYYxcdJJyEqMZG43/LbNm0Gctrfj9fp6lXHIBi+hzGZTnYvT0/H7FyxI7D1vtQLTt2xBsKWzE/jZ14f309KAm0uXKsKwpmZq10VMQqH8lYiS32FJ+GRbIADccbmwP/rFPT24B+fMib9kenBQVeUxE3U8MlQiivjVB+CPnZeos8YJykMl1KYzE/G/ReRyEfmtiOwUkVwRaRKRNSLyBxGpFJFrReSHIrL72HdeD9rG30WkVXBCzSIiBoOh8NjnykTkLhE5KCIXisjPRWSeiHwxaBsZIvKCiLwiIreIyGoR+byI9IrI/+OHjl24P4nINhG5TURyROTHItIR6w/uiPmTsZvdDvDLyoJj0dGBSWnPHrxWV4f/o5nbDbDv71ft6j0egITZjAlw9268X1amFnudnZh4zGaASVcXtkfRc2YRpK
Vhm3l5mKRYJtPRobQM8/OVBkUoGxxUGkHFxQCz4eHYM8UcDgCf0wmgIXBR82+ixrR9CtpSONhkUqK102k+H67n8DDuG+pJcZGWlYVrY7MpTYypNJ678eh46I2EZFdXbJ0rv/zlG+TJJx+Uq676gixcuFRstmHZtWu7vPjiG/Lxj39aDh8+Ig89dI9ceeXXpbKyUdLTRW6//apXgzaTVFjE0rBQZcl60jCWxdvQEEqPmXnl84Hc6+tDCc5LL2EMnHGGyNVXwxn97GdF/vM/Rf7wB5RAMwuooQH33u23oxFBXh5eb2rCdgcHgTd/+xsW/zYbSmouuwyO7/Aw7nE9gehyoaT4scfgLObmgsy89FJgnQhwaHhYRTTpcIcylwvO8SuvIDJvs+F+Xb0anaXXrlVEuNsdGvep7xIIKMIzUQ6h04mFeXMzHrt345yJYB/19VhYLFmCRXqwY8xmDpNpvPdIHoqo+y5UQ5epNhKJE9VHFImfSAyHR88/DzzasOGIPPbYPfLRj35dcnMb5Q9/uOoqSWLfaNMmzMt2u2rskZuL8UyNWy6+aW536PHp92NRaLcjqDoygsVoVxcW6jk58D3Y5OCtt7AovuwyYOKOHdhOYSEwKStL5J//BG4UFqogKRu1DAygScs//oHXli5FRlFZGY7B6cQ2uABkyTLLBlmyPGeOWthTczQWow52aysWxCzDq6nBwnfePBV08ftxvMFG30TfnTMrC6/HcyyhzOsFoblnj3p0duI9gwHnc8UK4P6CBaObAdImegzRjKQeG6PoOxeTOGT2oMORuP3qM8NjJRO5oB8ampwutMF2ww03yGOPPSif/vQXZNGipTIyMiw7d26XN954QzZs+LTs3Qssuvzyr0tJSaP86lfJjUWJNE2Dn2K1YswHAvCDtm5F4LKrC/f2xReDKHzoIeDHbbdhzHznO7jOn/wkxsjixfBjnnwSWHH++VjDuN0YP0YjgrM/+xl8r1NPRQXEqlUgxzweBFD0QfqWFmyzrQ3rr2XLQB6S4NM0Va2QnQ2/IVyVmKbB13jlFQRre3uBlUuWiJx3HgK1JSWRz5nLhWM6cABjorERj0Q0m/T7gZP/+hdwf+9e+DmpqRh31dXw4UgaLlw49dqvmjaaOCQ2mM3A5enQzmcwzu0G9vT3Y44uLMT16u5WzcZitbw8FUAXUbzCeH4bcdNmwz3X3Y2xdfHFMX39hOOhEm3TmYk4KCJ/1jTtxjDvh61F1zHA/6tp2ueC3vuJ4CJ8TNO0vx57zSAifxWRS0VkmaZpO4+9/oqInCEiN2ma9kvdNp4QkbWappUc+98oIu0iYjv2fcex1xtEpFlE0mJhgDs6EhvlcrtVB63KShUR37cPkwyjSNGMehMOBxaVVVWYhFJSAPp2OwB3ZASp5OxKyMkxMxMRsZQUHNPgIMCGJcvUOwwW6+7oAGiWl0ePkB85gv1nZmL/8ZQdO534PTYbzlVRUWIFqgn61A+kJtN0p1aL4NxR45AOMEkSZnfMJGOzi5yciZEq1OeMltmoaSKLF+fLRz7ySfnmN+9636lnpoDPJ/Lkk3fLT396nfzsZy9Lff06cTpFLrsMUa5kxaKDB0ULzjAcz3jw+dBB8NAhLFgzMpDJ094O0e72dpz/FSvw/8GD6Lp8/vkgHjdtgrN77bU4hs5OkIrd3XA8P/UpVRpqNsNBfeghLIZXroSDXVcHrOH4I4HY3Q0i8qmncP/PnYtMpPXrR+OH16s0ErOzQ2OLzQbC8JVX4PCz1Pm001AeePLJsWESj1O/cJwITvj9OPfNzSAOd+7EOSZxVV4OJ56PBQsmRtBPxEjMkTwUUdl+4+moPNnGRiuJatoSjZT3enFfn3FGvqxZ80m56KK7pL8fDjabfg0MiHR13S0dHddJbe3Lkp29TnbsUBH3ZMSj++4TjdUOxcUYV+PtQr5rlwoemc2qsuLDH1bi/3Y7ghf79+P1D30Ii8vDh1UDE4cD2zh4EFm81dVYWI6M4DoVFoIQe/JJ+DG1tZBQqK/HvW234/izsnBfezzYTkeHCpTNmYPxGe/49/lwrPv2wS9zOlVwoKEBeBjL4pe6VwwY5uaG7sgZqwUC+I3UXtyzB+ePBGphIY6Pj/r6qZNICDZ9x89g4pAaW1OV/cg5QR9QirSY9nhwf5lMk+e/UeKmqSlfzjrrk3LNNXe93ziRjY2GhkRaWu6WzZuvkzPPfFmKi9fJQw8lNxZJAjMRN22Cr9Dfj/NVVgYCccMG5ZdfcAGCoa+9Bhy6/nqRBx+EvEJFBcqdNQ3+8EsvIdFjwQIEK61WzAcLFoB4++Y3UW1QXQ35mPPPV9nJPh8I+sxM7HvnTugoHj4MvD3zTPhh+gxFVlkcPYrvnnrqWKyihMvrr4Oc6+8HFi1fjuDu6tWxdUT3ehVm+P3wJRcvHr+8lKbhuClLsWWL6ubMRlJNTej83NQE4nAyOrfHeqxstkntSINBSV9lZMwc30jTgAsOh2ok1dcHHI+3eoWSGwymm82hs/Cp08+EIf2D/Qx6elS2JLH08cdjykQ84XioRNt0ZiJaReRUg8FQrWla2zi38d8hXrtIRPbzwomIaJqmGQyGnwou3kcEjDMtICK/D9rGP0XkYoPBkKNp2oiInCQipSLyC164Y9vdYzAY/iFgmKNaOD2s8RgdyfJyALzRCAd1YAB6Ok1N0Vl9lwtO6JEjcPDWrwcQHD2KCS83F6BrteLvVatU5g6br8ybh99ls+E1hwNgUFqqSpb1xtJntxtk4Ny54R1eTRvdKayhAYuMWJ07jwf7cjpx/PX1OKZEOIeBgAL+1FTVxWomAL7brZw9EofM3GQ0a6YaOxFyop/IuaTOI7Uy9HpEFDr3+0VycvJk27bN0tnZJhUV1e+TrCR4KDrf2IhIbJi4S1JhUV1dLJ+Kbv/3f8CLxkaMsS1bQATedhvGbk0NHtdcA7x54gmQBTfdhHF9663ICDx4EM7zSy/BCb7jDhCPdjuu1TvvYOHf3g7n8o47gHEiYwnEt99Gp8IXX8R7p5+OTqwf/ODYse9w4D5h4yC9k9zfjxLFjRvhTPt8OPaPfQz6hqedFjvRzWwXatnE0vky1DZYlszHjh1qjOfmgui44AKcu5Urp06vJ9Ix60uVRWY2cRhsiWi0Qie4pwdOc1+fcnxZzkZ9IRERjydPNm3aLF1dbZKdXS2lpbiOc+cCx3ftErnnHpFvfAPXOowlDR5dfnliAm5tbRgLJSUYy62t8EkuvRTjQQTn/H/+B2P7xhuBJW+9hXli5Upg1aFDuNadnZhL164FAT8wgHu4v1/kvvtAQubmogz6jDPwHWarmc3wWViy3NWF75aXA9+KiuK7n9xukIZ79mC/rO5gNvGCBbGfw0BAZR6mpSFonJcXfxbI4CBIWr2WIQnJjAyQtldeqZqf6HUop8OY/c0HCUM2RpnuoC+Dlnppi3D4aLfjnsjJif+4g/VG2bRI/z99ME0DFmVktInFUv1+xVBDA54tFmTlX3st1g9hLGmwKFF24ADOS38/zmlxMQjE9euVRvr69cCi1lYES
9etgz/0zjvwL774RbzHYEFvL75z2WXApoEBjLfrr0c2dFYWMO1b38L97HYDyzQNQQ6nE37M9u0qm+y88+DTBmf6dXXhsx4PiMD6evWexwM/7/XX8RtHRhRxePXVaCIVq8SU34/ft2sXtltdDUIvXokqZpyTNNy+HeeHwdqqKvhrK1bAD1y4cGolEoJNv35kWS+1/riOnG4t5VBmMGDOy8hQ68vUVHABGRnxZUcz6/3gQSWrRX+LJOHQkJLbYpYmm7iyaoX7rasDb1FRERfXcsLxUIm26SQRbxaR+0XksMFg2CkiG0XkIU3T3oljGwdCvFYjIs+HeH3XsefaoNd7NE1zBr3GAooCERk5tk0R1MoHW6jXJtUCASym/X4sLIxGOKp798I5XbYs8uJM00A4HjqEwVhYCDKQYqkjI5iEyPSnpcHZZip6Xx8mtKwsOMt0ao1GkIf5+aEX1x6PcvQLCyN3dnI6QZKyWcvcubGntFPId3AQ2y8pwXlJRBq4z4djYskyCafpyu6hOZ2KOHQdU1BghqjFMvVp+eM1NlphhkSoSYkls6FIQf3/mqacbWa3UBicpeZo+nGnXH/9VfKhD9XI0qVLZf369fKJT3xCTj75ZBFRDg27aYexEw6L3nwTBFtREbBg+3Y4gzfeCBwpLsb53rABuPDMMyD4br4ZY/IPf4Az95e/gBTxeEQ+8xk80tOBEyQE9+2DI/yjH2FRT9zgwsvtRkkhP5uXh6Ygl14KbAu+/1kWRpF6ZuB0dqqOyu++i89VVmIxfPbZcELZvCEWmwh5aLPhnLJT8rZtqkGC0QgC4WMfU+U3tbXRs1imwkIRhykpmBOSgTjUW2qqWtiHOq+BABYrJAT1z3qS0OEYqzVqsWCMlJSABCoqwmP37jvlRz+6Spqba2TJkqWyZs16+fjHPyGnnHKyGAzQEb3nHtzXZWVhDz1p8CgR5M3goOoiarHg70OHsDAngbhvH7KmRSClYDZj8Z2SglI7LvTfe0+RaitWYFwND+P7r76K7WZlIdNnzRpgm55AFIEPtXu36kQ5Zw4WyPFk3rECpLUV+wwE8P3GRgSK587Fb4h1PHFxZrWqbeXnx3b+GXBmp+TduxHQEMH+580DGUEtw6nWDgtn7OTJjEMRtfCcCUL8emNghZUQXCiHIhPZoID+MbP1bTZFBIYjCZ3BI1pUxVB+Pu5VkoWNjXfKN795lTz0kPKNzj5b+UaUEpozJyJJnDRYlAgbHIQv0t+PucFiAbH1gQ/gfa8Xf//85/js176GcfyZz+D63XoryL2XX4YvQuLmssvwvYEBBD4efRSfoTzLRz6C7xmNGK+HDyuJmSefBFnD9dCKFfC9gtdgmqaIuNxc+Dx5ebjP3nkHxOE772D7WVmQgjjpJBCIsWhI6/fDCgqHA/PYsmWxNenwehFM2bFDHeuhQ+r92lr8Pna2nzcPPlxZWWLKosdrfr8iDrl+TE3F2iQzMz6/crrNaMQang1nhoaQjDR/Pt7TNPxGNuvUk4L6/6n16HTimbr8RUWqhNtiUSQi9SBzc7F/YlZe3rixfJaHmqBNG4moadpjBoPhXwJG9kMi8hkRudlgMHxH07TbY9xMiOkwbvNHeG9GDunOTgzQykoMqkOHEPkqKcHCMpLzZrMBgNkJl01FiotBLB49qkgxpxPbqqxUBGJPj9Il5EI8KwsAHVyyrLfhYRCfmgZgD5c6zpKYri4ASl1d7N2f/H4s2Pr7sZ+CAhx3IrTGXC4AnsejNCqysqZG2DaUsXkLiUMeV1YWHLp4JvSZZPpSR5aBUFNSTxIGm15TyGzGPc3/OdEVFoZ2Iq66aoOce+5p8vTTT8sLL7wg9957r9x5553yne98R771rW/FeugnFBa1t6Pkht07d++GQ3fNNTjfZWVwYn/wA5TB/P73Ir/6FfTFzjwTHQNtNmTxvPMOnNBvflNFvHfsEPnd7/BcXi7y9a8jCq/HNpbyPvwwMhyHh7GAvf12ZB/yPggeB9QAQxYq8ObBB0EcNjfjMwsWgAw9+2zVOTBego7kIbVHI2WW+HwgC0gWbt0KTCfxVFeHSPry5cjAnD9fHQs1faZz0R6qozIXwJPdUXmyzO1WZGBXlyop1pOEfX1jdUVTUzGfFhcrIXn+T6KwqAhYpP8uy50vvXSDXHfdafLss8CjP/7xXvnFL2bxKJyxMYDbDdzZtw9jZ/VqPEQQ8PjTn3ANbrgBY37nTvgIp5yC8fPaa8C1mhrMFSYTMlja2tCMqbUVn//IRzD+srNHE4goNcezx4PFTkMD5uNY/YShIVUOTH8pPx/HOH++8sPikUHQk0t+PwijgoLwC2rqLJIw3LUL55P3alkZMgs/+lGVBTndQVS9UWvL7Va+Qmoqrkd6+vT5bLEayUT6Om638vdIBlqt8I86O3Ft6QsG+0YMVuTl4V5duFBpk+sX4OGCzGecsUGuvHLWN4rVvF6Rp59W3X6zskC+L1+uGkkuWSLy05/iXvz+95Hx96c/YV30i19gvXX//fCVqqrgP+XlgaB/7z34N88/j21ffz18g8JClEMbjVi37d2LR1eXqrRYuxbj3mCATxVcpeB0oiS5uxvbXLAAmYavvw6fxOfD/XL22ahKq6/HHJ+ejvEf6xx/5AjwengYx33qqeFLYTUNOEjCcMcO+JoMCBQWgny8+GLgY0GB0hLMzVW/c7r8DxJkJA5FlA49icNkM79flRcPDWG+Y1IS5z27PTQWsXdDXh6CYJTQYICLusgMiogAC/V4lZ+fuGq6WR5q4jat06mmaT0ico+I3GMwGDJE5G8i8q1jKZ/j1aY4JCILQ7zeqHs/Xjt87Hl+iPei9KlKrLEjEkXH9+9XZc2LFoUHy0AAn2tvB4g1NqosHosFTmJ3N0C4qgqDeWQEIF1ejgF94ACAIi0N++AOYXkAACAASURBVGeX5UgOJEvw+vow8CNlFI6MYJJ0ubDdqqrYHD6WGPX24jfRYZpo1Emfcu73q7Ll6SpZ5mKAxKHPpzL3SksBxjPZQY6UPci/9Ytqln1QuzEtTZUdkSDkYioSuZOdrdLlw5VtlZSUyLXXXivXXnutOJ1OOf/88+X222+XW265RQzj90COSyxyOER+8xtgxMKFWPRu3YpFpQjuxZdfRnOVf/s3dCG84QbVaOUTn8Ci/L/+C9f8ppsgNJ6SggX773+P7+fliXz5y3AQ9QtmTYOze//9yAxKTQXBeOWVcNDtdhXhDb7WxLV9+yD+/eKLwDURkHNf/Sq2NXeu0lYhGRbrbeDzwZlipDm4oycdY32GYXOzahRRWIhFxyWXKNLQYlFjhfpdJA+ny0EORxwy43CmEoeUTIiUOciMfP13RIBDpaUgc1atwnNJicooLC6Gk5uSorJA9F3Pg5sZ6btP689XWdksHsVigYBakBYVwX/Ytw+lyevW4Xw/9ZTIs88Cq664AmNteBiL5MZG4NLWrbj+S5aA9Bsexlzz4IPAIpMJ4/H00/EegycGA76/dy+e2bCtujp26ZT+fpCGra0qu6+kBEGDhQvxN3VUo5W5BpvdDtLJ
68WxlZSM9dd6e0eXJbe2qozK7Gyco6uugn/Z2Bh7UHcqjZjrdqsgZFoaFq2sQJiJRj20SJmDAwOqKZ9evzg9HdcnKwvzVUmJIgW52J6IxiVt1jeK3Z5/Hv5EZyfG6XvvqdLv1FRg1C9/ibXNjTciM3rrVmQQ/ud/gmD7yU+wrluzBmXNR4/i+7/4BcqfNU3k05+GLuK2bbjPzzwT47utTeTvf8cx5OTgvjj7bDyzeq2qamyFz9GjIDMHBlTmYksL/i4tFbnoIpCQDQ0qs5flt7GuOXp6VCl1bi7wrbJy9GcGB1VZMrMMee+bzcDnK69UOoalpfjO0aMYM3Y78Le8PLIW+mSa16sao7BxiNEIH46BjJlqbJ4SLnNweBhr0GBJJwaxKbGwfLlak/KRk6MyrYeGRuOb3a5Ku41GBKooPxYpOSkRNstDTcymhXIwGAypIpKtadr7/Wo1TXMaDIY9gvbWuQLxSBGRML2gwtrTInKLwWC4VNO0x4/tzyAiXzn2/lPjOOR3RaRHRD5rMBj+K0jQ8txxbG9cxugjHYQ9ezAxzJkTWePBasVnnU4MznnzFDiYTNjG4CC2M3cu9sGJq6wMTmZrKz5TVATnm5NiJPN4MIk6nfheeXnoY/T7cQx0whcsiE1bQdNwTD09qhlHWdnEo+KcBPQly7m50xM1osA1rxcXErm5mJQS4SRO1FheHKqkWP9/KC1BEoEmk8rs1JOEeiHk8S4EqL0xOIjzmJ+vL4f1i81mE4uus09GRoY0NDTIK6+8IsPDw5J9zBsZjL9N5HGHRZoGR/bQIZSNHDwIJ/jcczH+iouhF/avf6Esp6IC0fKKCryeno7OzDt3IoJ8882IsPf2orz52Wdx3a++GgtXffmf3Y6MwwceUN1Sv/AF1ZnZ6VQ6X8GOh9eLTKONG0E89vTgfjrlFDil55yjSkOZ/RFv9iHveRKPJhO+a7WO1jHctk11STWb4QxffTVKcJYvBw7z2Dm2WIKmz+6bDtM3RtFHivVk2HSaz6c0ByORhHTuaQYDyJHiYpz/5csVMUhykBq/4QhSEoUMzgWXL+ubGOlJV73N4lF8tm8fCN/sbGTd7NkDPFm/HmPxj3+EJMJpp4EA3LwZ12/tWswJbMRis6E5UmUlsG3rVmRI2+0oIfz4x3Htu7qAL8XF+PvgQdxP6emqxDiWZhdHj6pGAn19eG3OHCz4GxoUUefzqQyWeDq4O52Y79xuJTOTmQm/ZsuW0aRhby++k5aG3/DhD6uyZDbsm4nm9SrikMQ8ZUqCAzfTYazUiVZeHIxFIrif6efX1ipfLydHySDk5OA3stnPRHykUDaLRfHZli3AjcOHgfNsSFJaqqpp7rsP5b9nnilyyy0Y21//OjRVn3kGEipuN/Bm7Vps78UXUW0xOAjJgO99D/7UCy9gP+vWgXz8179AvBmNwKzVq+HTjIzgmFjZpc/i0jT4RE8/jTWYywUcmDtXlU/X1GA/Xq8ikEym2NdZg4MgD7u6gEGnnIJ72u3G79NrGba34zspKSCRzjlHybXU16v72+fD9rZswTGbTOpcT0cFlsejiENmSZpMGL9MgJhOo7RTKGJQ/z8D2Xqj5qDFgjW8nhjk69Rw7O5GVmJeHl7PyVHNVtvaVC8F+j+UUaipwXcCAVXinJ09uR3oZ3moxNh03do5ItJhMBgeF5HtIjIgIitE5LMi8pKmad0Gg+EdAQt8m8FgyBOkjG7WNC0ag/tjEfmEiDxoMBjYWvsCETlPRO7SNK053oPVNM1rMBi+JiL3isgmg8Fwn4hkC9p07xCRlfFuM16z2zEpZWdjUdPaisFaXQ3SLZT5fIhIHT0KwG9qwoC1WjHpcDHk9WLSKC/Hex0dqjtiayuAgWn4tbWxLRSpkSCCbYfrvkxw8XqVKGoszt/QEI7L7QZIV1VNrNMfRVvZyEHfHWuqJwC/fzS4sxyS+hCTHZkJPpZIuoNsThJszEhKS1Pag/rMQf4dzdLSlLjuRDpqp6bi+1Yrzinvx5GREZkzZ45ceuml0tTUJAUFBbJ161a5++675ayzzpLS0lI5+eSTxWAwyI9+9COxWq2SkZEhl19+ee2JiEXPPIPSwOJiLD63bIGzW1YGbLrzToznX/4S7z3yCBzB225DV+X77sP9++Uvw3HOzRX57W9F/vpXOBAXXADntapKXevDh9HA5bHHMD4XL4Y24oUXqqiuzQZHjmS0CP5/4w1E5jduBNaYzSAT1q/Hgj1f5xqwQQszCGPN8mP5XCCA8bBvHxxiZhoePozPGQxYpJ9zDkiq5ctBGIQaB8zw49iazpLl4GPh8fAxVWa3hycG+ffg4NhgRXq6IgKXLFF/60nCwsLY8IjnITVVkYZ8pjGrkPNrqGtmMKjuz/r3x4NHn/nMZy6TE9A3IolnMGB+2LULwdQLLwQe3HUX3r/kElzjbdsQzDzpJNwn776L65aRoTSzHn0U+Jaaim2ddRae2RCOhP7evSpgsWgRFueRFq/MPiZxODSE4547F+RlQ8PoZgL6jr3U8Y0Fi7hgczrx+aGh0aXJ1EkTwW9esQJk4eLFWKTP5CwZkdGNUfTdjNPTp5Y49HhUd9xQjUkGB+FnhJI6YDlxVRXIkeDS4ry88FhEjV2/XxHE2dmjNaQT5RvOYlHs1tGBpnC7d+P8W61YT1VWYg3R0QH8Oe88vP+NbwAzvvMd3Ce//CXIsPp64M2CBXjtz3/G+6tWIdh5wQW4h154AWRLcTEqMphIsXYtggAkX1jyziow6tUdPIjjfeIJHFtWFvZx2mnYBhtoiowmdqgnGsu8z2YnbW24nwsL8VseeACkITsxiyhNxE98AqTh4sWhgzFcB/f04LgsFiU/MdUBTJdLVaqxdNdsVqXKU+Ub+XzRswfZtERvKSk41txczI/19aOTU/iIh5TNzcW6f/dutY6lxJTRCIyrr0eQLC9vLBGtaQiqcc2Xnh6eO0iAzfJQCbDpIhEdIvIbQQ36BSJiFpE2EblDcPJF07QDBoPhRgFz+78ikioin5YoaaCapvUbDIa1IvJDEblawCYfFAho/mK8B6xp2h+Ope5/7dhxcpvLRWTReLcbi7ndGJgmE8Cd0fPaWmQVhrLeXixmvV44K4wmdXdjAjGZEPl2ODAplJVh4LJLVlGRapleVITJsLAw+rFqGiYtdv+qrg7tmLLJitWKz82fH1sE32ZTHZfNZqWrMF7jBOlwKIedk8BUTkper5oAGO2jeK3Fgkk+kcfDjJlI2YMkVIItuDlJMDHIcsZEWEoKnGSm0cfbtU1vRiO+PzKCB65zpnzxi1+UF154QZ599llxuVxSXV0tX/va1+TWW28VEZF58+bJXXfdJT/72c/kuuuuEz9m4zPkBMOinTtBBKalYfxRy5BBjO99D/foT38K53doCFH2efNErrsOi+gLL0Q31kAAjvDDD2PsrV8PB7KkBNdb05Ax+MADyCBMS0O242WXYeHLRZam4Vr6fCrC/o9/gDR86SXVJOq006BldtZZY4MNzPajFkusi1GfD2QCuyQ3NwM/6VCWleH8XH4
5AjjLlkUusQnOYtNnQk4lFpEY0xOH+o7KiXaOAwGVUR5cUsy/e3pUiaXe6ACXlICI0ZcW8++JLqz1Zcj6TuA8Dykp6hFclhzJqPvDay0yPjwSkQflBPONbDbgkcOB8bpjB3Dm4otx3/z615gzrrwSvkZ7OxbnVVVKE7qgAOORmTq//jX+PvVUdPAsLFTNIvbuVVrQzGxnCWm44KXfj+2SOGQny7o6bH/BgtDaTpyDiUWxjDePRzUbOHAAvhUb54ngeBctAv4tWoRzMYmLs4SZpo0mDjlWSBoy2J3I/dlso7UHQ2UShsIiZtXk5YFA0hODfJ5oAJi/nQEr3ismkyI0JhJM19ssFsVmdjsy+XbsUIROcTHWZ5mZ8Ava2+HfvP46Pnfhhag+2LgR67TKSpB3IyPAmhtuwJpv9WqULldVqaYVf/0rsC89HfhSVIRAxMKFwJbUVCUjxdLhOXOAD5s2IbDKtVdVlciXvoRAS6j1ndutMIQZvtGM2oqvvorfzYQSapNnZyOYd+218ImWLo3cuV3TVHXc0BDwt6QEYyxR93osxmYhLLulv8gGIJmZiQ1icH/hiEG+HgqLWD2Xm4v7MDh7MDcX12Eix+vzhW7a5PUiuMIsapNJNVKNVs3HihD6hIOD+C2J0kAMslkeKgFm0EKxBLMWsxkMhqdEZKGmabHUpMd9sn0+FT2ursZE0NMDNr+mZuzn3W5MSn19AImGBuUoDw6qlPL58wHMfr8qzdmxA4vzefMwMdlsAIXS0th0cNxuTE5OJ7ZZVhbaYertxaSiaQAWipNHMqdTlR2xPCcvb/wOmderuiyLANymWujW41H6hpxgTSaVcRgLqRrK6GBGKzEONn1zklC6g/x7OkoWXS6co8zMiU8o1N/Iyhr/OZYZKHY7mVjU14fGJ0ePwiHYvBkZLGvXwmH47W+xqP74x0EMVlaKfPvbaFbyxBNwYm+7Dbj1yCNwuoeHUS7zuc/BKaQm5lNPIfOwrQ04ctll0FukhAIJRL8fuGa1omTxxRdBOLrdwIbTT1dljMXF4aUUSNpFy47t70dm4datyGLasQP7p2B0UxMeLEuO0Dl39IXQRo9Jva7gVFmojsp64nC8zqbHE1l3sLcX91ZwlJz6UcGEYPDzZOB1cIZhqCzDQCAx10hPIk4AV2ccFonEhUdxYZHPh2zBjg7MBdu3wze6/HJoif3ud1h4XHIJsCktDRqJLpfShJ43D4u/557Dtvr7sSi96iqMW4cDGDIwgCDByAjut5oapYVoNI6dP7xekHitrfDD3G5V6tzQEDnbLxAYraUaKfvQZkPAoqUFGd+7dgFP2UCkoQH4vGgRHuGkZGaikTh0u1Ug02DAWGdH5fH8Fp9PaXGFIwet1tANAXJzx2YLBpOE0yV3w4xVEhsWy7RmlM7Iu2yysMjvR/byxo2K1CkoAEmWmQlfwe1GduBjj+G+vvlmkMnPP49765xzkHn3wAN4kFT8xjcQ0OjshP80OIj333sPeLJmDZ5ZjTV3rsqQb2tTgbk9e+CvWa3AvvJy3CMnn4zAbKgkDHYS9vtVNVE4H8DhAA5t3YpmMNu3Y1/p6TiuhQsVWdjUFHvndq8Xa76uLvxGsxnHXlo6ddVh1Cwlccjs58xMPMzm8flGlKkKRQrq/2dptN6yssZmC+pLiym9lejgCjkEPkZGlG+UlTUaEw0GzJ2FhThPbGSYkaFkGCKZ1wvfcHAQv6msbFzXfEZi0WRYnGu/xO57lkSMzQwGg0lEPJruhBkMhsUisk1E7tE07foYNhPXydY01WikqgoRqv5+RLCrq8d+9uhR1dGzthYTD3UQmFpsNmNbdJrS0vDc1obBetJJAKH2dgBneXn4Tsp6Y4m0wRBauFcEv+PwYVV2MXdudMfL7cZic2hIaRGNN3VdH0marpJltr0fGlI6ZxkZijiMpjMSnD0YiiQMLqERUeRLJJJwpoqP00ZG4ITFm2Ifyqj/MQGty2mboKYai7xeZBm+/TbG3zvvAF/WrQMmPfQQHNqKCjiSH/4wiLvf/AY4c8UViDq/+iqExHt7gTOf/zwcS6cTi+7HHwe56HSCiLvySmQoiqgsYY7Tjg7oJ77yCgg9vx/O5bnnQm9o/nxVrhGKdNZnH5K00zs2TicyCPTdkjs61AJ/wQIQhStX4jFvXvzjhwQmx+tUlyxPhDhkN71o5cXDw2O/m5kZuqRYTw4WFEzNedCTheHKkvVahjR9KfJEjzMBROK0OssJwKOYsUjTsEhtbQVu79wJP+eKK0TeegsZ0CUlkEpgRtD8+Sq4WVoK7GpthV5iTw98qQ98AI+UFNW8bmAAf5tMGOtstsQyUhKILhdwsLUVVR7Mim5owAK6piayf6EPIoTKhPZ6QY7quyW3tyv8mjMHpMWKFcDTurrp1+CK10igkjgUUbqyJA7DGf26UOSgniTUN0qiscQuFCnIZ4tl+vUVoxnnEqsV56OwcHq04eQEwiIR+B+PPALfgHrby5Zh/ty+HQkcdXUgDOvrRb74ReBURwd8iIsuwti+9VZ8vrISAdPLL8f919ICDOrsRNBS00QuvRSdmJ1OvJ6dDQyjPuazz8JX279fYdGqVcAHpxOfWbAAPlgon8XlUprQlCKi+f3YLjUMd+4E6el0YvwWFIAQXbcO+2xsjF+j3mbD7+rrAy7k5cG31GuZT6bpG2o6nTjnKSmjicNIx+HxRC4t1leb6S01NTwpqH9MxTqNmrr6AAsDvenpsXV37+rCOZwzB9+x2cBHUNM/2n3BtT+bpsWSbBRkxx2JmKC1X2KPaZZEjM0MBsNqQS36X0TkqKBDzuePvb1C07SDMWwmrpPd0YEBVF4OcnBwEKA8Z87ozzkcSmcnPx8TRCCgOviaTMrRKizEIq+9XU0QVivAf9EiDPD2doBIRUX0kpdAAIA/MKCiYcHOCwnOo0ex/aoqgEIk83qVxpXBgMVAUdH4nDlG1ViynJamMtqmYlJyOBRxyNIARpMYNdYvJCKRhJGak4TTHZwuLbVEG7uq0rGYyG/itny+yBpEEWw6ScQpwyJNE7n3XjjK+fkg1CoqUBb3xhsgBs89F+PfZkMTlZYWvN7QgHLm4WF0XG5pAT78x38ggzEQQAT/gQdATJpM0PxhJ1ARRYqnpQFnnnsO5cpbt+LY6uqw//XrsYB2OJRWmcUSXm+QC1Vm1u7fr8hCkhR0miorsW0KfDc1jb9ENlTJ8lR2Myb5pScOU1IUaZiSgmPr7x+bORhMEoZqTpKfHz5zkH9PZQmS3oI7JQcHW/RkYSyEHpvLJOLaTZBInO6F+0TxKGa/6NAhZN75fBijpaUin/wkcOH550HmL1mC8V1fj3mwuxsLlvnzgflPPIHAQ3Y2FuzV1cAKrxcYZbOpzu4lJfCnzGa1UE5Px7XfswePw4dxL+XkgDRk2XQs8xOxSJ8J3dExmjCkLI0I/LR583DM8+YBk6qqko80FFFash6P+n3sZm8y4TexOUk4cpDPoRoCsDlJuMzBvLypl62ZbPN4gN0pKfh98XTyTpCdMFjU2grfqLlZaXY3Ne
Fe3r0bfpLHA9/iIx8BLr35JtYdF1wATLnjDgROc3LQrXnVKtybZjMqMjo78b/Hg+u5YQOCEizvzckBJrz1FnyuTZvw2ZISBHLXrsUxdXfDX9M0lEjPnTv291DvNRBQY7C7G0Th9u14bmlRyQ+UdMjNxRqVmoqxJJ2MOenHtPA6O3Euib3l5ROqForZ/H4la8XqNGZ1kzjUNLWWi5Q9yO/rzWwOTQrq/58uLPJ6Rwdb9HhK6Q5iZ35+7P6b368Si9igi9ngXi/ur2ikqN2Oe5BNYWORVNPZcYTssASt/RJ7TLMkYmxmMBgqReTXIrJaRIoEXXteE5FvxCGSGfPJZplXQQGi6MPDiPDoS+SYtt7WprpZWSwYpHR2LRaAX0cHBrDNptKM58zBPlJTVeertjYAyJw50fXn3G6VKRmufNlmUx2aCwrg/EZyeP1+9dtFlC7ReJxkdszSlyxnZU1+qQc7YZE4dLlwrcxm7J+ZUXqiMFpzknAk4VTrpU23+f04p4zaTeS3BwIqcp+fH7ezPZ0k4pRh0T//KfLzn+Mc7d2L8bhuHRbsu3cj63D/fjiTH/oQ9Hp8PpQoNzWJ3H03yLmCAjjAH/0o3n/0UZQsHzkC3LjySpGPfWy0bILHg32++CL0E3fvxnHMn499XXQR/mZ5KXGPDlvwvUFR+s5OFUXfvh1RdcoJ5OaqpicrVoDMJIampY1fg4sSAxznU1myrNc3ZHOS/n7MA8RafXnxwEDo5iSRMgdLShDkmUlkRrSy5GDScDzG7LFEXEc9qRunTffCfaJ4FBMW9fdDa8tqBZlYVAT5hEcfBcYsWQJCzWxGcIGdbysr8dozz+BzqanIwlm/Hv5Bfz+ejx7F3LxokcqSKCnBa04niPPDh7FvNo0rKFDEYTwlw8Si/n5gHDMZd+9W2btmM7a7aBGCx/pyxcxMzFkzvRFKsLEhiMeDc2q1wkd0OPCsL5mzWlXQUG8kyCKVF5NAOxHN7cZ5MxpxD01xhvsJgUV9fZBwefVVpa+9YgXOe2cnCER2ab7qKmBHf7+SOvnNb1DBYTaDUPzc5zAeDh7Efb9zJ9YrZ58t7zdIWrUK67TeXuDF/v143rkT19xkQlXEhRdiP8SKLVuALQUFIBaD13VMMOnvVzIMzc3YLtdhRiMwiGXJRUX4vM2Gv5uaIusahjOPR5Us03erqADuTrYv4fMp4tBux8PpxOtut9IjJFEYqjmJwaCak4QjCXNyZg5O6zvG82Gzqff1XeHz8/EbJoIbzJbNycE1pdntar/sQRDOhoZwf4iAl4ikKx5kx93KOEFrv8Qe0yyJOKUW08nmRJSVBcfWZgNw6wfh0JDqEFhSAufZ4QD4paWpKKvVionB5cL/Hg8GYmUlJqFAABHttDQQiHS6ow3UwUEQk8wsDJ6Y/H6839MDAI3UoVkEx9HXp/SxmNESL/hyQrTbMRmkpKiS5clYsJMI9HjGRnI0TRGXzGygUfMoUonxTC8vni7zeDChk5SdiPn9uF7smhgHmZDsE1RULDp4EJmELKfJzkbJ37PPAn+WL4dDe/rpwKiWFpFTTkFp4TPPQJ8wPx+lOStX4jw/8wwi7y4XHM9PfhJOL+/1QADO69/+hizF9nZck5UrUaJ4+ukgCPTXndqimja2fNlmA0m4ZQuyDHfsAFkmgjG2eLFy7JcvV3o9HNPMNBtv58/gkmWO60Qv6EiIM1uwu1s9mEnY14c5Ivgep4MXKmuQzxPpjD4VNt6y5ETslwLr00gkzuArE5NFxSKnE3jy3nvwUwoLEUT485/x2ooVSnolLw/zQ3Y27t1//hNZOCYT8KO2FvjR0QEykEGCqioQkRSILyrCeNm2DeQe9RXLykDuNTTEt3B2u+GzNTfj0dqKxRHvydpa4BG1DGtqcE/Z7TgmrxdzHjOVZqoxiEoikEGKvj68xswdt3tsEDQjI3LmYH7+xJuTnAhms2H+ysxUWBhPl+8JWLJfmahY5HYjOPrkk8CErCz4DpTvyMsDRs2bB7w5dAj37bp18H/uuQfX5Oqr8VpuLnCB2dGaBmy5+GJFFjY1gQT8299AXOobbS5bBqxobBwtnWCzATP7+4FXJBZFgCV798Iv2rYNvtt776l7o7ZWVV4sW4bjMRqVdv7AAHBz2bKxlXGx2MiIKllmEJ8ly4k0TVOk+vCwkqjo7VUBDDYDCV5zGY1jicFgknCizUkm2/Q4zGf6GCbTaMJwsgIvAwPYd2npaF6BEmuUBwlXOcRtdHYqObYYJaiSHYuSwmZJxKm1qCfb4cAElJYGsHO5ANQs//X5MCl1dGAg1dYC+JxOPFsscMSoV8VFeE0NBm12NrZ14AC2pScQfT4QiJGImUAA+x4cxOeqq8cCDzUWmVY/Z074BZamKWD3+QDMpaXxO8lMR2c6/kRLlvXNScKVGHs8KnputyvdNjrB1IqYKc1Jjifjtc7OnrigOUkoZu7GaMl+BSNi0ciIyFe/iky9vj6Mx5NOghNrNGIhnZIC4e8tWzDOrrkG437jRvx/+eVY6D/1FBzulhZs5yMfQdODBQtw/QIBlOOwJLG7G9teswalymedhbHs86nSEhp1VtLSgEcHDqiy5G3bEFWn01RbqzIMly/Hgj04SKHXFeWiK15yKNElyx5P+I7FdIj7+lRJIKf0lBSlJVNcjOdgLcLi4plNSISySGXJwYThBBuWxGSJ1Efk9kTi2tZxjUWBAEjA5mbgCxfkDz8MX+Okk3Bv67WSS0qw2H31Vfz/wQ8i83DPHixGGHBtaMDinET64CCCJz098J26uzGWa2qwqG5oiK1cj1Uiu3aBgGxpATZxjJaUgLAkabhw4dhx6HTCN2JVSX7+1JT2RTKWo0VqTMIMUEomEAMtFuBRYeHYxSufkw2LZqpRrkUEPjXnI5FJ90GPayzSNFRb3HuvIjWWLUOyh8ejdAlPOw2Y4nKhgcm+fZB0sdtF/u3fRL7yFUXidXfjO8uXw0fJz0cQ4cAB4JfDgTXX7t3YBysx1qzBdWTzCZaMigB73ngD13j1ajxTx3DHDuASEx3y8+ETkTBcsmSspv3AAHzB7m74WUuXIjEknnuIiSKdnfDbUlOB2+Xl42uWGAhgO6FKivX/M7tQrxefkwMcKiwE/k9Fc5LJNo9nbFkyJWe4LtXj7VTNJZqGa+7xhJbeYMZnIID1QFZW6Cqinh6MCo6VSgAAIABJREFUs/x83Osx+OVJdPWS12ZJxKm1iCfb40EKPDWpfD4VgRJRpS9uNxbxBQUYgHqhUrL+LF0zmZC5MzSEgVlSgn243XjdaMSE4/eDEIwE5i4XPutyYTvBQqdeLxzvgQFsp6YmMiFptWJS8njwubKy+IEtuGTZbMY2ImUwBjcnCaU9GK682GBQzVlI3DI7oKho5mfsHE82PKyI54mWPrhccOQyMmJOl0/2qxwWiwIBkR/+EOTfwAAwYv58iIgXFqqxajZjYj/jDJBRzz2H72/YgBKd554T+dOfMMbnzkXJ8oYNGDMuF5zSF17Aw2rF9tauRbfCD30IY4odmDVttBQBNdG2bgUxs
Hs3HGTiQEHB6I6AJ58cucM89blIBqWnx08exluyzI53oToW60lCLgb1lpGhSoiLivDbmEFIbC4pmdlR8lgtUllyvDqGk3mMJJ6nQR/xuMUiEZCHb7yBRbXFgqzkZ57BnL1qFTCJGrmZmcjSefVV+DirVyMQ4XIBK3p68HmLBeSd0ajm8x07sC9Kj1RUwEdasiS6FlN//2gdw9ZWJZGQlYWACRutLFs2uqok2Nxu4K7LBfzIz4+rhGvcxvLiSN2L9R05acwiyc3F4lyv91xUpIIVM0nq4EQwnw8+Unq6ynCjfyuiAtsJtuMai958U+SnP8X4Tk8H2dfdDcxgl+TVq3GuS0rgC9x7L+bz884TueUWvLZxI4jF/HzM1WvXKr16EUi4vPii0kKsrARenXkm8MPvx1rMZlPzvQhe/+c/RV5+GZjkduNYBwfxPiUSGhtVFUZNTfh5ZngYflV7O9aSixejpDoev8LjAfnT1QXMzswEcVhSEt430jcnCdegJBQWUeYoI0M1ZWIlWFGRCqZORyf1RBrle/SEIecbEZwDfTb3RKWfJmpeL+bl9HTMq8HHwm7VTidwKTd37Bre78d91NurunRH+U3JjkVJYbMk4tRa2JPt94Pcs9vhQIoA4C0WJc7b0wNwZBaQCJy21FSACLV02L3W7cb3GYkpLVVdl2trAaTvvQcgrq6OHAVmOnFKCj4b7NT29WHbgQBAIpQ+Im1kBBOKy4V9lpVF11/Um6apRin6kuWMjNgalMTanIT/szxnZETpODBrzWI5/sS5k8U4kYrEXYoc0ux23FPZ2TFFRpP9iofFor/8ReRXv1KNkMrLkdVXVIRxWlEBvCkuBka9+y7G8nnnITr+t7/hwZLla66BdqLLhaYoGzeKvP66yiQ9+2wQh2vWAA8oBu/1YrwZDMDHlhYcxzvvgBBg0yWzGQt9ZhkuW6a0Y5lNGM7YFZQlqenp8S92Q5UsGwwqwzpU92L+H6ohQKTmJOxObzaP7ajMBinJbNHKkhOhYzhZNo2NVmbYmYjbwmJRRwfworkZfk1dHQjC1FQsvLOzVWOsvj6RzZsxTzc1IRBBDVQ2NZs/X2WZ9PRggdzVBZzx+bC4XrkS1RNpaZgHghczTicCF3rSsKcH71FfetEiPBoalC8UrZzU6wVmOBwqcyQRpbtcoEXrXhyuOUm48mKLBQv0tDTVHIYYajJNSensrEUxpxOPrCxFnFCPk3Mefd0EWbJf8bBY9N57It/+NojE1FRUcVFmgF3SFywAZqSlQbalrQ2k4k03YRv0W1wuZFBXVGAu7+8XeeklBEr6+jBely5F1iKboJAAY7KJxwOfjM1P3nkH5ctdXbjWGRnAIpYlL1miSo+JbeHIQIcDmHvoEH4rdV/juU+Gh4G9/f245woK4Eump4cmBfVkYTzNSRi84P3NajR2l2ZFWrIGVPXyEHwwc09EJbHo8XkmBmxsNtyr+fnhA/puN36b34/rFjz/eb0YUyMjGBdRyt+THYuSwmZJxKm1kCdb0zAwKHZvNGIxnJuLhTxLjwsLVUSBWidWq9KWKSjAw+/HREIihI1PurrU4DObsU+DAaRguMiMvnw5OxvpyPpFuduNCY3iwtx2KHM4cAx2OyaS0tLYO3kFAirteWRElQWR9AuXPWgwRO5azHLFYGeXOhpDQzhuEfwuEofjSb+ftcQby6uoXzJRo0YHu2ZHsGSfoEJi0TvviNx8M4IWBgPO6eHDCnuysoA7ixYpZ/cDH4Dz/NxzIPkyMlCyfNZZcJK3bwcR8NpryvFdvx4ZQqtXj16EGo24ptu2ocS5pQVZhgd1Pceqq+EUn3oqHHE6twwgcHEUqTNlMHloNMaeleFwgDSgIHhPD85Ff7/qajwwMLYhgNEYmhQMbk4Sqrs9G6MEE4eToa84VTZdOoaTZYnWR4yDSDwusWhoSOTvf0cWYk4OMLm5Gc8nnaQ6k/b0AC+sViyaP/hBnK/+fjwXFiopl/Z2LIxJHObmggwoLcUiu6QE/gkXMSkpwD/i0K5d+D7HYUWFIgwXLQJJSV+KgUviS7h7wudTWX7shhmroL3XGzlzkI9IzUkiaRAGYxFxkx2VNU1lbvMxazPLRkZUxYb+HgwEVIlntHs0DjsusWh4WOQ730EGdCCAzEC7XTU0mT8ffonfD5Jx3z7gyWc/izGxZw/GCvWci4tVOfOhQ/AfWCackYFy6HPOwdpreFjJNbS2gmzcuxeZXfv3qyQJas+feSaqQxYvVpVgbBTCoGu4cep2A+f27sX/DIjEIjPg96umVwcP4jcxm5qSWySI9BbcnCRc9+LgYw4EVDKJ06mwSE8cJmMQw+0eTRharWq9y34HetIwmSQgenqARxUV4dfQmqZkivRVljSXC/eX34+5O0L1YhJe/eSzWRJxai3kye7sxKO3F6C/ciVAd88egEh6Opxb6uv5fMrRNZtVeU5KCl5jxmB6Oj5XUIBF7dAQSMCMDDjTzCoMN6G4XIi+ud2qPI6grGmIKnR2yvst3MOJjLtc+OzwMI6/pATHxG2FKynm/+yeRX0Hkwm/mxG/SA1K4nGKnM7RHZVFAFAkDpM9Bf54NTbSofMwESMx7/dHjegl+wQ1Bou6ukSuu07k7bfhoBmNWIiTQKQul8mEsbJwoWpc0NcHLLnySpB7FABvacG2ysuhY3buucg41DdS2bcPRGNzM5537sRYp/zCihXILKqvB1lZXDw2QqnXIIyUWaFp2LZ+cc/MQWa2htMd5LO+jIbHkJsbmhTUaxDGky17vBGHkXQMRWZOWfJEbZqIxCQ9W+/bGCzyeBCUeO45RfC/9x4WycSdkRHgy+Ag8GXNGoVNJhN8nfx8ZD23tmJc+3wg6isrscieNw9jPisLPgkJw0OHVMdk+gK5uaoMkM1PQmnoUi6FWBQuOMFmSPoKEosF9w2zT6KVF+tL2Ghmc+TGJHl58ZW3kTh0u9ViNiVFlQueqF2Qk8U4r7HUM9T7JBOjSXDEYMcdFvl8InfeiYYoTiewx+vFeMjLA1loMoF8a22FH3TZZQhaUE+6qQl48dJLIps2AVscDmDOvHloSLdqFUjFkhI0ZGluRuZiVxdIk23bVPUFm5osWaKavtXX43t6H5ha8fTnwhFrPh+Oafdu/LbaWmybQWNmiIXLIOzvB75SG95oVFnMkRqTMIswVn8mEFAdlV0ulflP399sTi7fgcSrHtOZtMIgvp4wzM5Ort8XbIEAyG82Mot03b1e3FtsKKYPrNlsGBNGI+77MHNQEp+p5LFZEnFqbczJ7uuDw9rZiQll5UoAMnULKf5KvS23W4FLYeHoCUPTMOF4PACbkRGAts0GErGiApNCezuAd+7c8A5gfz+OKS0Ng11fvmy3w6F3OABsoZqriOBYOzpUBy6ScfwtJAmDjQswfoaLfUarSKYmAkztdkUckqTMzlaT3WxkPTnMZsP9lpMz8WsWCCgNmfz8sBNdsk9Qo7DI5RK54QalNUaNH3Z5p86VCBbh2dlwan0+ZP6cfTYw5oUX4AhrGqLz552HjMSaGlwXpxNEIZuf
bN2q9FozM1VGz8qVyFIsL1f6OJo2NirJ0iwugMKV0LndyOqmpsrgII5XTw729o7Fo5QU4CzLiPW6OtRlKSpKTGZyIKBIQ31zDRKHyeQ8RtIxnOllyRO1aWi0kuxncBQWaRqylh95BOPW58NYXbJEybDs2oUxnJcHrMjPx3kiUTY4iEVxS4sKpNbUICBRWAgsCgQQMHnvPfhEO3fiewzALlig8KixEaWAke5VfZkosSjUNaM+VHs7Fo3MiB4eHk0WkrCj0e+LlDmYl5cYLKJGLM+/CDCIxOFMLJebtfDGJoDUDA9levI70v0bxY4rLBIRefBBkdtvx5rIYlFBorIyEBiHD4M8LCqCFnRJiWoquXAhxvXbbyNDcWgIY7ShAdmC5eW4JtnZaBTFBmnvvou1XFqayjCsqgJxeNZZ2K/djkCt1YrARlOTul6aBp+OzV6YcBFsPh9w7913sZ3sbPg0fv9owpBrI71lZuIe4XyXnY01Zm0tgjT01SY6v5MI1Wvfs3FmZmbyJHdQA1sfDKJfK4LfoicMGVA63oy8QGamkh2KZHa7khHLyVH4NTCAsWex4J4LgVXJjkVJYbMk4tTaqJM9PIzIT1sbwHf+fFXWbDJhgtFnoqSlqZLlUBPCwAC2mZWFgZeZiQmprw8TQ04OHFejEZNSqG34/RjgVis+r++mxNJmdk8tL8dngjMHXS4szPv78b28PDjuzKAJzhbU/y2CCYvp6UZjYiNMTJUmcciMJJZLJaJJx6xNvWmaKpVIxOTLEjNqU4W495J9gnofizRN5Ac/EPn1r4EbHBPZ2apbGsdgSorq0Hf66Rgvb7+NbEIRLLZPPRXOdGMjmhVs3oyoenMz8EcE25k/f3S3ZDoU1G/iWHU4lHi//roSc2w2RQqGyhzs7laksD6jy2yOnj3IzA19l+VE4gOJQ+rp8dwkE3F4vJUlJ8ISqY8oEpVITIK7JKKN8otaWkTuvhuLaOpbNTXB32hvx7jOzMSCurJS+QgOB/ynzk5sh3M6Oyr39gKn9u1TJXf0RyoqVNOTpUuRHRRPhh0JGKcTeERMCu6WSb8oEMB+TSZ1DJEyB6diUenzKeKQJYc8xvE0mpq1mWUsv83JiXxv67XDU1NxD5yoneLfeEPk+uuBKxkZqulSXR3O5b59wJ4zz1SB0vJyVWGxc6fSA6ysVDg2PIwgx549qou7CHyfwkKQhKecAl8qNxdrqvx8Fcg4eBByL6mpkJOpqFDH7PXi8yxh9nhAXulJwaEhBE+YrJKZCb+HEg6RyoqzsvCdnh5gbloafLfy8sQResRSh0NptRLnozXOnCmmb1TFuYABGaNxNLazwudEMasV86Dex45kJLU9Hlx7rtM7O8FJVFQovU+dJTsWJYXNkohTa++fbGbkHDqEyaWwEIPB7QaYM8JDgVyWLIdbkNhsKnXe7VYg29OjugazO1J1deiFsNOJScXpxP4KChQxODio3svNBSkZ7FSmpKgJKjUV2ygvx/GTKAznjLjdoycMpqcnolSGwuKcRJkpwMmRzWlmLbmN5WHUlZro4t3jwT3DJjpBluwT1PtY9PDDIl/6Esa4z6fKXkggpqerDIXiYkzWnZ2q8crJJ0O/p7YWBMCePdDqaW1V2TR0oFma3NCAbRuNGI82G7afna1eIylIfZS+Pvzf3Y0HF+ThmpMwi5vkIB1dEoThSkOCuyyPYyEV0UIRh/oy5ZlMHJ4oZcmJMF7fRJHOEYjEZD/L72NRZyeCGTt2qIYQixcDR3p64A8tWgTCj4vlgQHVjK6iAtiSnY0s57174WMdPqzGc2EhFvyNjVioz52L/WRlRe6kHpwpyMVhX58K4FKGQW8MwqSnw9coLERwtqRELSSzsqZnrHi9SuOQ58doVM1RThTS/0Qw3sPM6I92bYPJxBgb5Rw3WHTkiMjHPobgJ9chLC89cgTn5NRTVQZgSgow6vBhfLa6Gs2f6uux1mtpAfl38CDGGzOnCwsRtLjkEmCC2Qx8yMxUFV+Uk/L5QB7u3o33GxtV2afVCiyizIHXO/Yak6iyWpU/t2wZjlFPEoa6zi4XfD7KQmRnK38qETjh8aiMQ/qN6emKOJzJsglMOtAHjJg1yfWInjCcLryfSdbZiXNUWRk7Kex0KjkhzquHD+N819aOadhygp/hqbFZEnFqTRMBWG7ZgoV2YSGAfXhYMewETk4w0cpTPB6Ae2rqaF2T7m4MKosFAzY9HQvpUB2Me3tBYhoMmBiYMuzzYTsjIxi0NTXYXrDu4PCwKgfMzcV+okVW9MK4JPY4YUx0UmJ50NAQjj0QwHGypDo7e9ZBPh6NDpXJNLaD+HiMmSUk1XSW7BOUJoKy4gsvBH4wO4YlTyYTxlFqKhxYpxOEXVoaGhtQDmHPHiz62SnbYkFmYWMjsnpOPRWYIqL0l1wuOF1Hj6rs65EROMFdXXh9YGAsqZaaqkqKSQzqMwdLSxXhy8UPNYOimb7LMiUVEiWboNc3DCYOZ3IA40QuS56oTaE+YrKfeU0EC9+f/Uzk+eeV9lh5OXDFZEJ5cXU1sGNoSPkMbEbkcGBBsXs3FvPUjJ43D1hUX48gBnX8iGlsEqInCfWNSfh3MGEuosqL8/NxHMEZhCYT5g9mUBQUTH9TNuq5kcgQGd0YZdYvOn6NGT2UB4pm+rWCiPL5j3d9VodD5NJLofcsos6X04n/ly5FcMPnU80eDQb4RHPm4POUSOjowOfS0vB+YyOeV68GTqWkIJOxpwfbz88HxrW2AncyMlSzzO3bgUeFhaM16plwwmZTRUVjMwk9HmBjby982aVLgafR5m2rFevHgQF8tqgIuJyIRoZMHnE41D1G/zNcCfZ0m77bPR8jI+r9rKyxZcmzmDrW/H4lrVZZGZ9GLzt4p6XhXj54EP8vXDhKriHZsSgpbJZEnFrT/H50QGV0KyMDQMoyZYtFZQHGAqCBAADe7VYL9JQULMKzsrD9o0exr9LS0GDW14fJy2LBpGI24/NDQ/iuiMri0X9f01QjAo8H+ysri97cgs1SWLJM0nSiXabYqXdoSGU2sQzSYpmN/pwoRmI6KysxnctsNmwzO3vUAjDZ7yStvx+ZOOx8bDCo0jXqgjFIYDSqpkwsFRbB+42NmLzr6qCPWFEBYpDBB2YUMnuwt1cJcOsJu+xstfguLYVjQZKwsBDv5eaG76RMHS8GUmIpwdN3dKYWFMnDCZ1cbXTGIc+vvlR5ptlsWXLijURiovQRwxCJSY9Ffr/Iz38ucv/9wNqiIrWQZNM2rxfjhs1E3G5gSVcX/jcYkI1QW4vP19UpSRhqD3Z1qW61/f3wFagzrTeTKXx5cW6uytImFgV/3+VS2SgsVU5EUGs8Rq1GEoe8f/TE4axfdOKYywX/KB6fOw4yMdnvJE3TRK64QuTRR4Ed9IEMBiRRVFUpHWZmE6ang9hg0wgREIULFmDs19YCi9LTsd5KS8Ma0GbD57q6VONJEdVsyWIBBlKWKjsblR+1tQqHjEZcR1atBc8zQ0MI8nZ04HNsKBVpPvL74a8dPQo8Nhpx/GVlEyslZpM
WEocMsumJw5nmGzkcowlDBrBEVLNB/VyRDKXWM8UcDtxjFgvm/HiMzX44Rpkd3Nj4/jhKdixKCpslEafQNE20d96BaLjHo6JEtbVYNFMfQD8x0wEM18G4qwsTUUqK6oo1MKAW5IOD2Ca7MOt1CD0eRAK8XkwO7K7sdiONnvqKNTVjo+fDw5hkXC68V1YW3Umm88KSH04cE0lTZ8np0JBaXKSnq65gE+3WO2vJaSMjuDcslsREM6nHYbG87yQk9QTl84m2fDlKbGh0lkmisQO6y6XGaEUFFucVFQh0mEwgBtvalFyA260IORJOBQWqtLi8XBH75eVwogsLlYh/ZqYqNdaTfGywFOz8Uo9M39Ag2jWfjJJlEkYkD0Vmbkfl2bLkqTMuNhOljxiCSEz2q6P9/vciP/kJcJvdg0tLgQV+v9L2YhaVwaCaulVUYAGSkwP8OXAApCH1FDkGiSGUMmCwoqBgLFkYLluQ/hexKHjB6/HA53I4lJ5ucCf5qTB2oWdHZT1xaDLFXJo6a8epjYzgvohXZ1PfPCiMRnCy31XaLbegGzPHTGrqWE1SNokjplss0HiurUUmYkkJxh67x7PjbiCA7w4N4Tw2NGAbmZkg9sxmpX/Y2Aj82L4dGdZlZdA/JDZR85BrqWDyym4HUXnoEMZ7YyNIzUi+kdOJpBRmcufkwEcrKhq//6JpKrCvx2RKVoUiPqfLvN7RhKHVquRyUlJGlyTn58+uLxNhfX0YD/oKyFiNzWp4bx09irXEggUiBkPSY1FS2CyJOIX22muiPfYYwHnBAkw6dXUAJqMxNFHIxajeWK7MaA4zhTIyVEOUoiKVth4qVbivDwOOKfaZmRiQPT2IWImoDAD9d+12EJcOB5xRfflgKAsuWeaEOZGJg+VMQ0OqvMBsVsRhIrLPZi25jVmy7Ao+USdF0zC2/H7cZ2lpyT1BLV0qWnPz6NfoEJOEy8zEAjszE68z4qc36nwVFSHYwAV5eblyptk5ldu22YBxGRmKpBweVt1HKYPA8mLqygXvOxBQWl5cIEcjD4mpiSpZJnGob4A1E4nDSGXJs4Th5FuiG60EEYlJfcXuuUe0r3xFZY9nZwMD3G7l65jNSts5I0ORYXrjuTWbgTkMXBQUYJu5ucAonresrNjHp75zbagsLOpGM6DLIMlUjiXiIR//n703j5Lsqs58vxsRmRE5zzVXaSqNaEIIyWAZj7TtZ7+228vDwwZjuRd2P9NgwHgELInRdGMbT3Rj2kY8sA02NGB5mQYMmElCaEYSGkoq1ZxVlVU5RmTGfN4fO7fOiZv33hgyMzJu5Pdb664YMqa8wz77fGcPgI3IVuGQEMD6RzrmNnueuotwPkE91rbo3e+Gectb7ONkUuyNCqa9vfJ4zx7r2wwNrc1ySiRsplV/v629PjYmcyhjJIVZF1wvvFCOh87ZDhwQn+jrXxeR5NprpTu9+mErK3KrUYiuHSsUpFGLNru77DIREKPKS83Oing4P28XWrRxZiu48z7dD265qo1qlLkeqlXZ5yoWzs7aQBTAHi/dhoY6x5/rJoyRKMJKRaJ8W4lELZXkWJ4+LRrGxRcDF14Yb1sUFygitpGrr4bp6xPx8MAB25HJP/i4tbLcyahGyiQS4iRqcVydAOZytoOV1nEbH6/9/GrVpi/39dkVJk0N0k5dU1O1E3L9Pl19q1cMvFKx6TOAHYBbDfV2Q+C16G46HY+iu2RrqFSsyN7qiqGbkqqdgMtl4I1vjPcA5XlYY/h1MUJtTjpdu+lk1N20hqA6heWytWFArWClf9eJeCKxdiLi1vhxbZsfNxW6kfRaNz1X37MeJ1b/L/9n6v2tRn9X0PDeSb9zO+Ger63gP6YqWt91V/fYIrU9mYwVE92IlUwmfNPC/IAV+jRDA7DdVYHGU3hVeHcXHfz+VKFgo1XS6fZOkDVS241+Vh9xI5tBke5Dy39o3eBmcCPZ9fwzBvjTP+0eW5RIyPU8Pi7zNW1usnu3jZZ2uxa729KSrdOWyVh7pFFXN91kbcaBAzL3WlyU79qzR+rl33+/HJdbbpHv1lRgLb+gDSuVclnqKD75pBzbiy4S4THM99V699PT8ju1s/SuXa3Np1TcXF6Wz9NFM52j+ee67SabrY0wXFiw44EuPLl1DDmnbB/FogiJmUxtp/FmMEY0kCeflFIlP/qj8bZFcaEDy5Z2L8mkFPceGBAjtrhYO1nXuhZqbHXg8df20uLI7qRkedmu2GezYrTHxmqNtgqFlYqt7VOtygWnXW137qxNS9auU7mc/F1r+4Q5p7oKrrVTVHhodnVBB8xcTgYmTSHS4sH9/Z1ZdJd0DjoZzeetM+ei4qC7uaKh2z3XfU9QN+C4ohNOvZ5TKRshqPbIf6tRODp50Am5v4lEkIAIWOdM63O5NQjd1NqgZh06oXd/exhhQl8rjqz/s/TzOqGhSJRgCFA07BQ0PV/vu/iPoXuu1Tu+3YA2ldOxXf0G7Zqstkef1wVJtSWFgk2VU3/J86y4oQKif7EiCrU1bq1UxZ3Qa11nf0TQZuGOUa4t1AUgCoekEXTc1YU8/7jtj1wPi2QH1tbQjTOplHRcvuoqiRDUDKexMYlsHh62pV7cW/WDcjkp76LNKAG5JrVz8oteJP5oIiHZXqdOie3SMi/f+IaUk9q9W9KXMxk5Rq59cxcqqlURHR9/XOzRvn0SuRjW+GR52aYsV6s2QntionkfQevbLy9bvziVsna8XnPNzaJQsGKhCocafKLp6ZdcYtOTt7rZ1Xant9dmT87Py3FpFs8TbeK666RxLWkPjERsI5UKjNtUxL/p82rsXDTCMJOxIdc6gFWrcvFMTMjANTYmq0kuWoC8p0dWvvr75bVHjojBnZyUwUcn8+WyDDLakUvrmQWJgdWq/e3rSVk2RgRQFVhVOBwasit+FA5JPTQ6Q8sCaKMdneRpqYAg06f1QlXAD3rcDSmEjz0Gs2+f2ISobW5u7XsHBmRFfnhYHF0tybBjhzije/fKY51863WtzVO0E6rWEurttbWWtCmKO9F368JqFGNUJJF7fNeTsuxO1vVccSPFt6LOGesYxg9XjHKbE6jIFdT1F6g9nv77vmMc6yN9/DjMjh22y6ne6qaP3VQzJZUSe7SyIvti506xS2Nj8nx/v9gjtQONNFdz676pLVI/RmswaWkL7cS52VEr5bLN7NCIw1TKCqqd1oyAdCYqPOvYVq3KGF8uW8EryC9Sm6MiddBtN9iir34V5pZbxJ7k8+G3unjgorbi5EmxVUNDdnEkn5fnbrjB+pG7domYp2mc5bKkL2ezwPXXi4gJ2DlhIlHbsdgYERsffVQ+e8cOEVAmJtb+X8bIXG562gaMaMpysw2fSiUrHGqmmTbH1FIT7USDatxahhqRrun6bi3DrahRSxpDS6Xt3bs+AXp1HsGj3AYoIraXhna2rjoFCYynT0vkYD4vr9UmJZo3SNi+AAAgAElEQVRaODkp4cAaTdTba52EHTskEjKRkIYq587J+y64wK5aVasiOJ47JwPP2Ji8L8hJ1sFEQ9d7e8
URaebir1atU760ZB13N1WAK+tE0Tp5/tqh9eqI5nJ4vhi/m17iFwmbIO4DVEO2qFCwKS+nT9v7zzxjVw3PnrWCv5JIiN3YsUNs0q5d4iiPjsq2d6+Udejvt8dNo6/dCXGpVFuLLKixCrBWoNF0vmYn1240qg6NbkmJdtc4C4v+cMXCToiG3I74BV3//bDoHBUNNXIsSBxscsyL+9FvyBZVKjLBdkXGxUVJG5yetv5IMmn3u0bsDA1JqqDWJtOoR73VCb+OJcDaBk2aDlcui2+l5Wg2i1LJNkdxzxkVDukXEcVt6uUKhP5bPzpuZLNybo2OrhUImxxftoUt0khkv7h46JA0Msnnbc3CbFb8z717bU3FqSl5rq9P0o5nZyUNc3gY+IEfkNeWSraeoGaCKCdPSsflhQWxQ9ddtzZwBLDNN6en5fem0yIc7tzZ3MKHlooIKyfVruAOXZB2BcPFRWvv1S673ZK5wBIfqlXRJjxPgprWOcbF3RbFAoqI7WVdO3tpScS9QkEM4+ysFRGXlqxwqKLj3JxM+qtVuwKjTnhPjwwm+/fbdKGVFetMTE7KQON3kt0041LJ1uZoZiDRlaPFRTsApFJWOORK0fbDFYGiRMKw6MGoyEF1lrSQ+AYVvI/7GdqyLTp3TuzNrl3WHuTz8vzJkzZV5uRJcWBnZuTx0pKNTtT9Pzwsn6Pb7t2yTU3JpH9qSmxROh3sUKiorBOkVrosu+nriluPth34BSm/YOgXDcnm0og4GFZvMihy0H9fReoNmnxta1t04oSdaKuPk83K392JbzZrhUddhH3+B6wei8FBGR+05plmP1Qq8vmjo2KXNiP9TSMgtSSMm36twiH9ou2HKwKGCYRhtigqctAtB6I+vc4h1kHcz9CWbdH585JSPD9va7pqQMaePXIdV6tiT06dkuPT3w888YS8bmICuOIK20leBUetP9/XJ8fo0CGxY+Pjkra8f/9au5DLyXfMzMh3jozIb/DXyI8in7fBK7o4m8lY4bAdvlE+v7ZbslsWx98teavSp8nGoR3Ch4YkCGEdxN0WxQKKiO2l5Z1dKMiEXMXCpSUb3q5Rhm4Y+9mz8npA/lYsAs8+K4MKIAbXGBkkZmdFYKlUbJdjDU3XzS2KqxGHExO2c2s9NIVRU0uNkUFAuxg2kmZE4okW33ZFwaDHfjQyrZH04kbQc1A7Cq+TuJ+tLdmiXE5sy8iIXPuFgo141ihQ7UyoETuA2Ct1SM+fl4n/9LR8lpZaOH1aHG43YsLz5LNdkVEjHKemZKFj1y5xkBst3K2CkCscqsjTDuGQaclbh7vvw8TBqPTieuJgo8dKI4fcGqLrIO5nSEu2aGVFohCXlsSeDw7acioDA3ac0DRNl1LJRjOePy9+ydKSXUjV7Ihczto0rX2mkYtB0Yx6v9FjakxtR2UtwaA1Hykcdi/uOBQlEAZRL7W4lej0bFbOwXWWDYr72dqSLSoUgIcfFr9mcFDshNqXPXvsQmg6LXZleFj8p29+U15z1VWSEaZzJE2XLpftAu1zz8lcrbdXSlLt2lVbv1oDQRYW5Dj29Uk01/79jTUWNKZWONTxyRUON3MBU+vvu6KhLvbo4r8rGHLO2L3Mzsrx9/doaBKeHW2AIuI68TyvB8AlABaMMdN1Xt7Szq5UbC0LHZzUuBsjF9r4uLy2XJaivtmsTRs8f16igoyRxzt2yPsXF2XirvWE1Pl106iXluSCzmbtKlo6bWuWaV1Gv+iotTuKRRvOD8h7VThstWMu6RyiREF9HDQp14ixKJFwM8QcrU2j5+s66LgBarNtUbksdqSnRxYQtKOfLmTohEdTcPr7bVOb3l655tUx1sgfnaxop8hiURzm8+fl1q3PqCnVp0/bIt5u05CJCdtdcNcusYv6eOdOER3dDq0q4Gx2MwKmJbeHRsTBqOjBevUHN+P3qkDVjWk7Tdijpm1RtSoC4pkzMiFXW97TYxc9tY501Ge4zZ3UpykWxefRUi0aiehGMrrp1EE1rPv71wqM+lhFBs+zpRoSidpmMbQD8ca1PWGpxWHpxY0IhJuBMSJAAevK1ujIM3czbZExEoF46JC9hvN5sQ26yKn1uHM5ERTn54GHHhJb9X3fJ6KYCnfa3C6REJvz6KNSuz6RkNRnN9U5nxc7dOpUbZdlN5W3pye4EYzaSR2HtHGLLpioj7xZY9/iYm2E4eKi/bvWmnW7JTMDY/tgjJzTxaII4S3WHK575jY5ZyIBUERcJ57nXQjgOQAfMcb8Sp2XtzRAnTkjk+pUSgYMFVhSKZkkayejbFYExEpFBpq+Pim8m82K83rBBTJo5HK2gGk6LYOc28nLXZFyU5aTSbmo/XUa3S2XW1t0V6MWJydlMAgSHRmG3ln4m5OECYWtNifZSocgmxURanh4XQXxO85Z3mxbND0t17RGNmuUjzZEqVbFbujiQi4n58jgoFzvbuMUPQd0Iq+r3r29a6Mg/CnL6lyfPVtbr9G/zc+v/qPOfzowsDZ1Wh19fW5iovXzk2nJm0OQOOgXCoOoFzm41cKt/vZ1CokdZ4uApuxR07bo5EnxbXQynEzazs3a3TlskchtnKI2J5GQ5+bnxbYkEjalud75oRN5V2D0C40a0ejasVTKTvbHx4NFx8FB2olOI0oY1Nt1NifZMsplm63RYgTQtrNFx4+L0Kc+UbUqYuzEhEQBZjLyXKkkc6Ann5SIxf37ge/5HlsSRudZPT1iUx5/XLLHPA+47DLgyitrm5ZkszZl2Rjb3K6vL7heo95qJ+VCwS5iJJO1ZRzcCEc30rFVW6TltTTSUBtTAVb0VFs4Ntb+5iyk8yiX5drq7ZVo3hZsYyMi4oVofM5EAmCv2w5nfl42XSVURyOVEqFweFj+phPqdFpWq+bmJPw9mZTHExMyiBw5Io5tT48o/KOj9uKsVKwwqFGHOqDoa3SFyGV5WX7bwoI4y1qzUQUjXeVaWbGrZX509Us3v8ionanpUK+fsOYk7uOgNBoVf1IpWwNznc1JtoSBAfkfl5bk/Oc5VZ/5eblux8flHNEO7CoMViryd73utfvy2Jg8p5GDeo5Uq/KcNlTRCBxFV8fdLstuI4p0WmzalVfW/k43+mN52drFc+fWio733GObwrikUuKMu5GMQZt2lY5KS24l1XU70WpzEleQ1ZIGQUJhp5NI2P+X50hjLCzYUi3ugqqOS1ECYqWytlFTpSKLtEtL8prR0eYiX3SCPTW19ru0MUqhIJN+jW7USb6KjMeP28ZyLp4nYo4/XdqfTr3Z3aG3A/704kabkwBWCNTGX+tsTrJlaBTcyoptxEHCWVy0NQrVFs3NiQ3Zt09skdqbgQHg7rvl+r/xRmlyqXOt3l6xIeWyCJJPPSXn2yWXAC94gbVnxogvc+qUfGcyacu5uDbPX65H/SGd36mAqGOlMVZkXFgQPyloDqANXoIERr0PrE1LVv9PF2cuuKA2LZkQP6mUjKlnzsg5pNmWpLNgJGJ7aWpnLy/LirtG9ekkY2BABMShIRkIjh8XB1XDvo8dsxP+AwdkkDpzRgx7M
ikT5PFx6yRr5y0V99Jp+Y6w1SBjRCxU4VAHIy1KPjISLSZpxFJQ92l3CxvEglKn3ee2q0NdrzmJPg665DXtISrFOA6T8kbR5j7JZGPRJgHEYEoQScO2qFAQp1WLextj649pJGGpVCsO9vbKftXX6AQLsB2XAXnOTd/T+pl67TfSZdmtb6jntlvfMOrYqoAQFMnoblqOwWVkRBz4HTtqIxtVeNy9e8Oa+MSSzW5O0k2okN3iIkzc90bDtqhYlAn23JxNGdYoxJGRtV1Mn/8CJ/rQtUXqwxgjn6WLHq1SLtuyDO4x1SjJqM/Wibw/itEf2Ri0CJtOR9doHBpqvH51N1IvtXgjmpN0E0tLcv4ODzd9PcT9DGvYFpXLwAMPiIioKcPnz8u1dtlltY1UVlaA735XfKhbbpHbYtEGT3ge8Mwz8ppCQeZt11xjxcBi0foiWu9QsyjCjk+5bIVDFfG0xIMutkRRLNpyNP5oRv9z2sxK62pqWvfoqERfah3rHTvkuxutX02INmX0C+UNwDOsDVBEbC8N7+xSSULeZ2etM+p5Mqjv2yeTeE1frlZl0qqRN729stIzMGBVfM8TYz45KYOOpixryqFbByNoUDJGLmR1utUZHxoS570FZ6Mu9VKn3ZRpF11NjYps1JpEcWG9zUn8wmCrzUm6iWJRzulMpqXV0LjvsYZsUbVq66lq2YSBATvxKhRsMwOdpA8OipOoUYaauhwlHgal+kXVfwoSDjcyLcwfIbe0JLb09Gm5PXPGRjaqc3/+/NqJqJaLCNp0ErBjRzwieJWo5iTu/SA2sjlJN7HORitx32MN2SJjJLvi6FEZw4eGbI3m0VEbEeNHx0nARjMvLsqiarVqsytaXXwsl8UOalkG/R6dSG+0X6QNwvxCo3tfm9e5JBLBNRr9YmO7OtJvBK69iao9GJZeHCUMdkJ68VahKbmJhPj3TRD3PdbwHO3pp4EHH5TzrK9PIgSHhqTDso5lqZT4T2fOiDB44402u0I7rh89KtGHy8viF1x7rY260nqH587Je8bGREzxZ4MppdLaclK9vVY4XG+ARS5XG2GozU/0u3RupZklpZL83X/9ackbfxSj/zZOtohsDtWqaCHGSAmAJhZt4m6LYgFFxPbS8MT91CmZmLpFu8fH5SLq67OT2ExGHMCzZ8WQa32v8+ftpHZ83E5Ug1KWBwaCRbVKxQqHi4vyeo3cGhmRAXOrV2E1jdIvMvofhw1iQVGNrujYjkEsKnqw0eYkYdGDHISj0fNEha8miPsA1ZAtOntWHEdN+9ZGBTpx1vNSBUOtrap1dlKp2ghYN6o1KGVZxcOgyZuKhu6k0G2M0uqEL6pbcjN1DMtl2V9hNRrdSAIXXeAJawqjWzvSfhoRB1tpTsIakPVZR33EbWOLnnrKLmikUjKGj48H11XWzsduJHQuJ+Khdo4fH2+t/lapZIVDtRduR+WtPterVflfg6IZXdExrClMWDSj3g8SazcaHR82ujmJ3ifRaFNE9ZMbZFvYojNnpBzKwoLsn9lZ8SGvusoKhKWSNH8qFoEbbhARUUvB9PWJn/Cd78j1OD4OXHedjPvVqk1Zzmbl9Tt3ingYdN0VClY41EACzdjSskOtUCzWioXz89Z3SSZt/ULdomrQakBIVFRjUBCENsoKS53u62P9xO1AoSBifH+/+MINEndbFAsoIraXhnb2zIxEGGqUDyAi4IEDMiAcOyYO4tCQGN6FBTGmBw6IQZ6ZkfeNjsrg09srRlzrFQLWMfAbYC2svLAgTqZO/DVNeXAwfquzWrg4SmR0B2AXra8UFdUYJj6ttzlJvejBrZ6odAuLizZ1pwmHK2ZXwRrq2qJsVmyJe54nErKvNKVOJ3PptE111vqF/u6nOrluJmVZhUN9rTtBbGUiqIJhWLdkf8ONjbZ1xohD7kYwquioz01P2y6ZLoODwZGM7mO3TIWfRsTBsIidTm5O0k3osWgyCirue7+uLcrlpNHA4qL4Nb29IqqPjweXXtExVm1RPi/XXakktmp8vDkhTNOhtcaha+c0oiiO14B2kVVRMSiycXl57ft6eqJrNGpTmLB9slnNSbo1vXir0DnD0FDDUWwxvApqqGuLVlaAb3xDBMJMRq6RgQERELU5yuysiB6Dg8DNN1sRNp2Wsf2RR2zq83XXSXZZoWB9glJJ3rN7t8z9XF9H5zMqHGoEuxsU0axvpOV9XMFQS7h4ni31oM1PWiwBFIn6lWFio9Zy9JNIREczrrcpDOkM5uflmpmaqm0EG0HcbVEsoIjYXuru7MVFSdlZWBCjmkzKQKKrWMeOySCSyYiTV63K39NpmfCXSmLwd+2S51yBLCxluVSyacrZrDynNYZGRrZPLZ1yOTp1WgczYG06pU4m/KKfDmAqwLjNScJEwu2cRrMVGGM7+bqNhuoQ9yMUaYtKJXGCPU/2iZ6bOol2o0AGBuTvuuCg79cIIE3pC0pZ9qfSu8XtXeHQjThsBlco84tk/ui5TrrmVlZqhcag7ezZtXVjk0lxsnbutKnSO3bY5/SxLnz4m7/EsTlJt6FjShPjQAeduS0RaYsqFakVduKE9UcGB6Wxkl9A9C9clMti2wsFsVHj441HVWkEjW4qHGp9Q7ccQzejGSlR3afVF3UXaQDZ1wMDdnMfa8MYFafqRQ52mo3eDhgjx9cYmbg3MB7E/QhF2qJqVeogPvCA7ItcTs7pF7zALpRqjXqta6g+fz4PPPaYLBL29wNXXy1NLxcX5TnNHJuYkDmdlo8BbPkpFQ61EZcb1NDoWG2M/D4VC2dn7TEG5LPcTska9d0JaM3tsNqMehsUoRzWFMa97ZT/kwQzPS3HV5sW1SHutigWUERsL5E7O58Hnn1WJo8qIO7fL4PR3JyIhOpE6erg6KiIf4WCDfXNZOxgowKXdjd236/Coa40ZzJWOGyygGnXoCk0YdGDmuKhq2I6kOXz9jWaIqTOgzrB6jyHRTRu56YwW41G9Pb0bJtVrlBbZIwdrMfGbIreykptXUMtheCKTioSqnioz6kw4tbodL/PTVUGWhMO/SnJfsGw0bTkTiIqcrBcljFBBUX3vm5nzshxA2on4GNj4TUa9f5mRBuQ+rRQHzHuRynSLzpyBHjySbEn2rxtamptdJQbfVitysR4ZUVszehoY1kUKkLqBlhbpsLhdiWq9qAKjdmsvc1mbUr18rLc1yZ87uamT4c1h2kinZZsMJWKXEup1NquvwF0tS06fBj4ylfknC4Wxf+54gqxDdWqCIiJhNQ13LdPzu9yWezXsWNiR666Srounz8vflYuJ/tWx2GNkK5WawMYjKkNBNGGLPXI561YqF2T3aZP/rTkdpQq2Gy0KUxUQ5iwevr16jSyKczWUanINZZM2usrAh6lNkARsb2E7uxKRQao48fFwPX0ABdeKHUwdKBRZ00Nf6UiBjGTkclfOl3biUsbRqjju7JihUONqOvvt8JhkzXhYodOvOulGPtxhY+oFGPXoGnHa39EY6NNYaJqNXIQ2xwKBZn46H6uQ9yPQKgtmp0VR1NF795eOV81
nU/Pe41Q1hRlFT784iFgBUEVRYKEQ3/USeSPj6hjqJ+1mWnJ66VdzUk06sBNnQ5KpT5/fu33ZDLhTWF0m5ri6v1m0KSQ2GFnd9NE2qIHHxTbrFExO3eKsKR+jS766TWl4lUi0VjqnUa3aJd5wEYVaYOAbqadzUm0mVlUVGMut/a7ksnoGo1xbAoTJwoFG3VXR2TqWls0Nwd8/vPSCKVYlEWJK66Q27k5sVUjI9I8ZXhY/J/Dh2VLJIDLL5fIw/PnZcwtl8W/0pTlRMLO6ZaXbUOSZNLOB+o1hNTIaxUL5+ZqFxGHh2sFwziWp9ootJ5+VAp1vaYwYXUa2RRm81heFr91ZERqiUewTc/s9kIRsb0E7mxjRDx8+mnbLfaSSySs/fRpKyBqkdmeHpueo6lpWhtDmx/oBH952QqHKlrpav7ISPc4yP7owSCRMGhSrgXXo0TCzRoM1GGIqtUY1hSmXp3GdjWF6TayWRvlWydcPu4DVKAtWlmRATqRsFGIroDY27u2C6qKHdowRaMRVQBRgT2oo7Jb37DeRD8uacnraU7iioHtbE5SKjXWFMbfiCGRqG0K40Yyuk1i2tEUpttootFKV9qiQkHSBk+fljTksTFg716bXqd1CtWeqHjoeda/CdtvlYotzaALh8mkrW/YLcJ4HJuTVKs2ojGsXqPWMfYzMBAtNg4Pd/9i+WaRzco1MzISeey70hYVi8CXvyz2qFCQ8+zSS8UWaefYAwckAhGQ6OnDh+VcPnhQ7NbcnAiInmdTlkdGassoaXBHKmXncVE115eWapufaB17QN7rCoZ1jhsJoF5TGL0Nq6cfJjCyKcz6OHdONI3duyMDPuJui2IBRcT2ErizZ2akVoY2MbjsMlucd3HRilo6cU+lbDcsXSnp6bFRQ7mcFQ41vWdoSAaRJptHbDnanCSsa3GzzUn8jzs9rdEtohwW0RjW2cxtChMkMgY11tnuaA2gSmX7OcuVitRBzOclwkwFRF3J1mYGOoHXa84VCf0py60Ih52cltxqc5JGxMGtFj/rYYyMSfWawiwurn2v1ukN23bvljGt0+1xu2mwPmKHnzl1WXPFGAM8+qh0Yx4YkHNk/345R7Tju0Y653K2JIuWeAmy21qOxG1Yl0pZ4TBuE+x6wqBrc13CmpP473c6blOYoBqNmkbtR0uWREU1bucIrTCMkTmFRrSF7J+477XAOdp99wFf/KIIdUNDMkfr77f1Da+9VuzTsWNSkqpUknTLXbtsc6KeHjveJRK25JQGd2h2R5hfvrKytluy2rGenlrBcHSUYnk7USE4KrIxrClMve7TWk+fWIyRuUq5LNddyNgdd1sUCygitpc1O3tpSTp1HTkijsvBg+LYnj8vA8zgoO0+lUrJ4N3XZ8VBNTr5vAzwKoAkEvLakZHOTfPQRgtR0YP+xgGAjXqKEgm3W3OSUql+VKOucrokk7Vd3YJEx+3W2axaFQctkZDrZ7s4y2fOiEg0Pi6Oizo/GqEzNGRFMo0yVPEQsFGHbnMUHV7c+ob+RiqdkJbs/w1h94Ngc5JaVlassBgW2Xj27Nr9mUqFRzK6kY7bbeFDr6OIxb+us0VHj8rEHZAInwsvlOidREIm3VqDTyP1BwetwOjidlTW883tqNyJ16ZrP6MEwiDYnKSWcjm687Te99siXXh3tyDhsVsyeRqlVJL9lU6HRpfH/ewKtEWf/KQ0vBwclLTkREIiM3fuBF78Ypl7Pfus2JmJCYnO1yjnwUEpSzU8bJujaER/Om19bX99VzcleW7OilDql7qiISP9Ox9/U5gw0TGsKUxU9+nt2BSmVJIMzkxGrq8A4m6LYgFFxPZSs7OLReChh4DHH5cB5qKLxHmZmbFderUGiXa204lpOl3rIFWr8npN4xka2lpnsZHowSBjqUJEVPRgJwqicaBatQNVVAfqoOOiA1VQVKM+7qZBrFSSSUY6LU5gAHEfoGps0eIicOqUHMeBAevUqO1R4UZtSjIZLCa6kYP+OojA1qQlu2JgmDgYFT1YL4qQNE+lIikpYTUaVYDUKFiX8fHopjC7dnVXU5gG6iPG/T+tufoWFoCvflUWNC6+WCbtk5M2tUybdWhTDi27oLiNUXQs0/qGWy0cNlJ7MCy9OEoYbKVzPRGMEd8nSGB0nwuKJMpkgsVF93G3NYVRf3FwMHBBp6tsUTYL/P3fA9/+thzHSy+1CzqXXSblpA4fFl8pnRY7pYudk5MiKCaTcn5ptpAu2vf3y9+0CZQrGGaz9jfoAoluDXbJJjHFTZ8Oq9MY1RQmqvt0b2/3+EWAXDczM3Kdud3MV+mi/7RzoYjYXp7f2dUq8J3vAPffLxf3/v02AkpXHTTqcHDQ1hsrlWwDCE1jVuFwYGDzDYTbnCRKJPQT1ZzEfdxNBi6uBDWF8T9upimM+zhOTWH0/x4YCCwmHpP/IpTnbVGxKJHQxogd0ZXy/n67GKGioQqG7qZRMW4dLX39ZqYlr7c5ST1xMC7nabeiNZ+iajSGNYXp61sbxejfJifjs/ChQmJIZGvcz9TnLUO5LALioUNSF/oFL5DSCtWqFXUAOb7aSVRrI2pzFF3ccIXDdlzL9SIHm2lO4n+OtmjrKRajazRqV2o/2hQmrEajpk/HaXHcLfnis0dxP1Ofv0IrFeCznwXuukv+x4sukmM0MSG2KZez9mhiQo5jKiWLXNpYRRd/XOHQTUvWJijqp6TTtSnJY2PbL9qV1EebwkRFNTbaFCboNk4i9Zkzci3u3bsmhT/utigWdLSI6Hne7QBuc57qMcYESFTtxfO8gwAOOU+9yxjz1gbe+vzOPnQI+MpX5P6ePTLgaMHe0VFbwzCZrF1ZB+RCUeFwI1c5/c1JgqIHg9Jo6jUn0Y10D/6mMEGiY9ggFqemMEtLtpj4O995O+644w73z3G2RwaQ43P0qDjDg4NigzxP7g8MBKfluhNaN0LGrYvoind+wbARgS6OzUnI1lAsrm0K40+lPnMmuCnM1NTaKEZ/k5hOiSTS8z6ZBO64o/tsESCdmO+9V/b7i18sxyeXkwm3MVY87OuzacramdnzbJpyT8/GRjI3IhD68TcnCWtUQrqHoKYwQRGOYU1hojpPd1JTmGpVIoaTSeBP/qQ7bdE99wAf+pAcswMHZF62a5fcZrNyDCcnZevtlef7+60t0uwcrd2pwqHO45JJeY+KhWNjnTPWkPij9fSjGsKsrATP6bUpTFS9xk4pK1OtSlqz50kdUse/j/QAulBf2hLiIu28avX2+dPd87wfBvDbAK4GMAngPICnAXzVGHO787rbUXui+HmNMeZ/rb72TgCvdv5WBnAKwL8CuM0Yc3b1+dOrv2kSwJ82+8+cOiWdvrJZcZJnZmRAmZqSAWlw0EYlGmOLr+7aJWJGQFRUJG5zkqjowajmJL29MsDFsTkJ2XiSSTlPQ1J9AYQ3hdHHCwsywQ9rChMkLrqiYzsGscFB+Z1u17uPfvSjeNWrXvUqdIE9mpkRxzaRkP+xt7d2suJP2XVFQL3u1b4o/vpb/sm8Wy+x1eYkGrUct+YkZOP
p7RXncd++8NdUq3KeuxGMbir1c88Bd98t14Cf4eHopjC7dkn0yWafe4mEFbS60RYdPSoT94EB4Lrr5FaLp2cyMtnu7bVjiu6TdFq2ViJ2NqI5ifpAQdGEZHuhtciHhyUyJoyVlfDu0wsLMikOKuXQ2xtdo3F4uD0ZSYmEfE82axdnus0WfeQj0ihlzxPcWosAACAASURBVB7xN4eGxPacOiXztH37bN15Ff80Inp5WeZvuZz9TG0upoLh0BBtBNk83KjDgFTf5ymVohvCzM833hTGf9uOpjCJhCz4njwpJXJ27Gj6I7pKX2o3sRARjTEfcx97nvc6AH8O4CEAfwngHID9AG4E8PsAbg/4mDcDOBPw/D0Bz70aQBXAAICXAfh1AD/oed71xpi8MSYL4GOe512IJg/ywgLwL/8ig9PoqExsJiZEQBwasgXDtRabRhyGCSbVanT0YCPNSbSo73ZvTkI2FncQi8LfFMYvOs7OBg9iyWRjUY3rOYe1wPrCgl1BfuUrX4lXvvKVH7Oviac9ymaBEyfE0dXahyMjtZNiVyz0RyJWq8FNT3QCrgsTzTQncSfg27k5CdlYEgkZZycmJEU2jOXl6KYwTz0lwrv/PO7pWdsUJujxelPTksnaBb9usUWLi8AXviD79YUvlOfOnLHlWtJpm42hdj+dDs9wCGtO4n8uCFcM1BqKfoGQfhFZD+qbRE143ZrnQVGNR4/WbwoTVqdxeHj92UG9vbY2O9A9tmh5GXj/+yUqWoM6tJHKjh0iHmozk0JBhAsVYfRYZDLymgsusKnJzMYinUhPj2xDQ+Gv0Xr6YanTs7O1579LVFMYvV3vtaHX29yczAGjglv8dJO+tBXEzqx5npcC8HYA9wN4iT/81PO8nSFv/awx5pkGv+bvnc/9oOd55wC8HsBPA/h4Cz8bgAw4//AP0nVQB/PRUYliGBy0KwYjI7YwvKY553LBImG95iR6gbI5CelUdBAbHg5/jQ5iYQ1hzp+PbgpTL6oxahBLJsVhDBbj42mPymXg6adFRBwetgXi3ciaoIhDf1ROM81JXIGQzUlIJ9LfL808Lr44/DXlcnRTmMceA/7t34IjiSYmoiMad+8WXyDqmggbu+NqiyoVWVg9exa46SaxCbmcrQmt9qe/39bUVRFQF0n9AmG95iTqG7E5CelUUikbtRaGMbbRUFDq9MyMdA0OqmEd1hTGvV8vvVabPepv0ftxtUXGAP/tvwGf/7z1G9Np2Rc7d4ptLpXEb3LLOI2OSuSpHq9ms8UI6WQSCVvTMwq3KYz/Vudp/rIygA1miqrXWK+28diYfNfMTOuZCXG1W1tJ7ERESIjnKIBvBuWvG2OC1OD18kXIQY6YWtTnz/4M+NKXZMC54AKps7FzpzweHJQLxRhR02dm1r7fbU6ikYpsTkK2A40OYoVCeFRjNivXVZBD3dNTP6pRhcZCoaY2USzt0SOPyDY5aScNQG1dQ02f1Of9dsUVA13hkc1JSDeTSlnRLwxjZBIfVqNxehp46CFZwffT31+/KYxel9VqjfAVS1v0uc8BDz8s3U5VQB0aspMGtUPLy7ahnB+3oZNmUbA5Cel2tH7x4KDYijDcpjBBNRrPnAlvClOv+7RO1nO5mgigWNqi970P+OAHZb9eeaWteTg8bOusujUMx8bqL/oQsl3QRmYjI+GvCWsKo7czMzLH8o/zWrokqk7j1JSkNZ89G11OIoJY2q2tJI4i4lkAywB+3PO89xpjpht835jneZMBz88aY0KS7J7n4OrtuUZ/ZBAf+pCEw19zjdzqivvycm19HY0UdO/r46D6cYSQYDxvrfhYqVixUQsPa1qK+9g/iD3wgNx+8IPA61///NOxtEef/ayIFJq+XCjYyB43nTiqi7HbHZkQspZ6YqM2hTl7VsRFvX/mjGzf/KY41f5xX2s3/sRPiAi3Sixt0Yc/LLaor09EjVJJ/j93IcJNI/aXPGBWBSGNEyY6VqsyH8lm5da9f/YscPiwPPaP9/esJuz90R8B73zn80/H0hbddpvYmBtukHnaxRfLXG1sTHyloSFrb5aXbW1WQkjr+LPRjLGN0/J5uZ/Pi1/gztOC5h46v/vt327pp8TSbm0lsRMRjTFVz/PeCeDdAJ7zPO8eAHcD+Aqk6GVAsCwA4Nshz18E4IjvuQnP8yqwOeu3QU6sf1nPbz90iC3HCYkrb37zHbcDuO31r7fXcVzt0dvfTltESCcwNRVdpzEIzxNb9LnPxd8WfepTtEWExBW1Re98Z/xt0fIybREh25W42q2tJHYiIgAYY97jed4zAH4DwPcC+AEAfwDgvOd5rzPG/EPA214N6YTj53QDzz0D4NeNMUHvJ4RsY2iPCCGdAG0RIaQToC0ihMQN2q3miKWICADGmH8C8E+e56UBXAvgJwC8CcDfeZ53yhjzVd9b7m6i8OWPQrrnlCAnxjPGBFXiIYQQ2iNCSGdAW0QI6QRoiwghcYN2q3FiKyIqxpgCgPsA3Od53jcgRSp/GYD/IDfDl4OKahJCSBS0R4SQToC2iBDSCdAWEULiBu1WfRL1XxIr7l29ba0vDyGEbBy0R4SQToC2iBDSCdAWEULiBu1WALETET3P6/c87/tC/vyTq7dPtOv3EEK2L7RHhJBOgLaIENIJ0BYRQuIG7VbzxDGduR/A1zzPexjA5wAcBpAGcCOAVwCYAfCnAe/7Kc/zzgQ8/5gx5uHN+rGEkK6G9ogQ0gnQFhFCOgHaIkJI3KDdapI4iojzAP4zgB8H8PMAdgNIAjgO4G8BvMcYczzgfe8L+bz3Aujqg0wI2TRojwghnQBtESGkE6AtIoTEDdqtJvE6uSmM53m3A7gNwBQAGGPObekPWsXzvASAcQD7ATwI4F3GmLdu7a8ihGwmtEeEkE6AtogQ0gnQFhFC4gbt1sYQl0jEGQDwPK+nQ7raXAzg0Fb/CELIlkB7RAjpBGiLCCGdAG0RISRu0G6tg06PRLwYskOVL5kO+MGe5/UB+F7nqeeMMc9u1e8hhGw+tEeEkE6AtogQ0gnQFhFC4gbt1sbQ0SIiIYQQQgghhBBCCCFk60ls9Q8ghBBCCCGEEEIIIYR0NhQRCSGEEEIIIYQQQgghkVBEJIQQQgghhBBCCCGEREIRkRBCCCGEEEIIIYQQEglFREIIIYQQQgghhBBCSCQUEQkhhBBCCCGEEEIIIZFQRCSEEEIIIYQQQgghhERCEZEQQgghhBBCCCGEEBIJRURCCCGEEEIIIYQQQkgkFBEJIYQQQgghhBBCCCGRUEQkhBBCCCGEEEIIIYREQhGREEIIIYQQQgghhBASCUVEQgghhBBCCCGEEEJIJBQRCSGEEEIIIYQQQgghkVBEJIQQQgghhBBCCCGEREIRkRBCCCGEEEIIIYQQEglFREIIIYQQQgghhBBCSCQUEQkhhBBCCCGEEEIIIZFQRCSEEEIIIYQQQgghhERCEZEQQgghhBBCCCGEEBIJRURCCCGEEEIIIYQQQkgkFBEJIYQQQgghhBBCCCGRUEQkhBBCCCGEEEIIIYREQhGREE
IIIYQQQgghhBASCUVEQgghhBBCCCGEEEJIJBQRCSGEEEIIIYQQQgghkVBEJIQQQgghhBBCCCGEREIRkRBCCCGEEEIIIYQQEglFREIIIYQQQgghhBBCSCQUEQkhhBBCCCGEEEIIIZFQRCSEEEIIIYQQQgghhERCEZEQQgghhBBCCCGEEBIJRURCCCGEEEIIIYQQQkgkFBEJIYQQQgghhBBCCCGRUEQkhBBCCCGEEEIIIYREQhGREEIIIYQQQgghhBASCUVEQgghhBBCCCGEEEJIJBQRCSGEEEIIIYQQQgghkVBEJIQQQgghhBBCCCGEREIRkRBCCCGEEEIIIYQQEglFREIIIYQQQgghhBBCSCQUEQkhhBBCCCGEEEIIIZFQRCSEEEIIIYQQQgghhERCEZEQQgghhBBCCCGEEBIJRURCCCGEEEIIIYQQQkgkFBEJIYQQQgghhBBCCCGRUEQkhBBCCCGEEEIIIYREQhGREEIIIYQQQgghhBASCUVEQgghhBBCCCGEEEJIJB0tInqed7vnecbZUlv9mxTP8044v+sbW/17CCGbB20RIaQToC0ihHQKtEeEkLhBu7UxdLSI6PCq1a3iPul53kHP8/6H53nPeJ634nle1vO8hzzPe5fnebuc1+nJcjDqSzzP6/M877dXP2Pe87yl1c/+uOd5P+Z7+etWf9O5DfofCSGdD20RIaQToC0ihHQKtEeEkLhBu7UOOkZ5jcIY8zH/c57n/RyAjwLIAfgYgMcBJAG8EMBrAfwcgMsa/Y5VFforAF4E4OMA/gaAAXAQwMsB/D8A/o/zmz69+r53tvI/EULiB20RIaQToC0ihHQKtEeEkLhBu7U+YiEi+vE873rIgf0OgB8zxpz3/f13Afxekx/70wBuBvAGY8yfBXznrrVvIYRsZ2iLCCGdAG0RIaRToD0ihMQN2q3miKWICODtkN/+Cv8BBgBjzByA323yMzUU9atBfzTGnG7y8wgh3Q9tESGkE6AtIoR0CrRHhJC4QbvVBHGpifg8nuf1AfhRAF83xjyzgR/93OrtL3uel9zAzyWEdCG0RYSQToC2iBDSKdAeEULiBu1W88QxEvEggF4Aj2zw534GwKMA3gjgFZ7nfRXAPQD+jzHmqY34gr17YUZGgBe9CLj6amDvXmBsDBgdBUZGgL4+oKdHtt7e2vvJrjrtCNlajAEqFaBcrt38zxkjr3/ta2/DX/3VHfjEJ2CWl4Fbb4WHGNsiSD0OQkgHYEz4Vq1aOwQAf/AHt+Hd774Df/zHMJ/5DPD1r8fbFj3yiPhF+/fTzyFkq2nEL6pW7etf97rb8Bd/cQc+8AGY3/gNeKtPx9Ie3X47zMAAcOmlwI4dQH8/kE4DiUTt5nlrn0vELiSHkO7j5Eng618HXv/6521RM8TSbm0lcRQRh1dvFzfyQ40xBc/zXgbgtwC8AsAvrG7wPO/rAG41xjy7nu/4zd8E7roLeOopYG4OuOwyYHISGB4GMhlgaEiExHQaGByU57zVyyCRCBYX3fuplH09IduVajXc+XWf9+N5cg2lUnLtJZNy3WWz9jU7d4qDuUpsbREhpD24QmDYFoTn2cmqez+16rXddRfwH//j8y+PrS3avRs4fRo4ehTYs0dsLyFkYzGmMb8oyB6pX9TbK3MUfazPAcA119S8JZb26NZbgQceEHtdKIiImErJnCydtv+rK6K6BAmLQQIkIWTjyWaBJ58EDhxo+SNiabe2kjiKiHpwhzb6g40x8wDeBuBtnudNAfgeAL8C4GcA3OV53guNMYVWP/9NbwJuuEGc/+lp4NQpGaA0CtEYGbjktwBLS/K8CovJpDyfzQKlUvBgHyYwurdcMSNxJcz5dR3jIAcvmZRNHULXCU6l7N+VahVYXJStWpX3ABI9vGfP8y+LrS0ihKyPKFEwSiBUQdAvEPq3MBYW5Payy4A3vOH5p2Nri3bskP1w9qyIiRMTsojKyTYhjeEunIb5RUELp4mE9Yv84qD6RamIWWKxKNcsALz4xTV/iqU9OnBA9snRo/K/l0p2TuZ58jiTkX3V2yvPVatrt0olfI4WFsXIqEZCWqdSAb7yFbnmfvAHW/6YWNqtrSSOIuIzAIoArtvMLzHGzAC4C3JwPwbglwDcBODrrX5mKiUndyYDPPqoiIhnzgArK8DFFwOXXAIMDNjBK5GQC2NpyUZDpdMSuTg+LqtkmYwMWqWSDOilkmz5vLwvyHFIJhuLaiSkXbir5GGr5Y2skvf3W8fX3RqdkKp4v7Ag39ffL+UGikX5+wUX1HxWbG0RISScqLTiRqIHwwTC9U4Ol5eBL39Z7r/97TJerxJrWzQ5Kftmdla2UkkWV5neTLYzQWVXghZSg+yR6wdpZoXfL1qPPTIGuP9+uVaBNRHEsbRHnieLxNWq+IA9PbK/Ewl5rqdH9vfcnLw+nRZBMZMJnjPpuBG2lUqNRzVqZgyFRkLWcu+9wPnzwMteJr5Di8TSbm0lsZOKjDErnud9AcCPe553SZtCQO+FHOS96/2gZBK4+Wa5HR8HrroKOHZMVr7On5dUyUsvtZGJKoBopJRGIs7MyOd5nggdw8Ny4UxMiBCpQodfYHSFxmJRBEx1Alw8r77Q2NPDaAFSn0qlfgRh2Cq5roa3skreLLmcOIflsjiFO3aIkzg9ba8RTWcB4m+LCNlutBo9CFghMJlsPnpwo377e94jixwAsGuX+7f426LRUevfLC/LmDA0xPRm0p1o9GBUanG5vPZ9WnYlmRT/ZGAg2C/abHv09NMShTg1tfZvcbZHyaT4firgqoCo9xMJmW8BMn+an5f7PT02c0wXd9y5WxRRQmNUVCPTpwkBnn0WOHQIuPJKCchqlTjbra0idiLiKrdDOuj8ved5P26MmXX/6HneKIDfM8b8XqMf6Hne9QCmjTFnfM97AP6v1YdPrOtXr9LTI81VPE9SeF78YhEvnntOLoTpaYlKvOoqGy6vop/nyar9wIAMECsrknJ57py8D7CDnLsNRQTnahSYX2DU+8vL4StmqVT99GlGE3QnG7FKro6X6/xuxCp5M6ysyPVXLMo5u3On/CZAJrRnztjHAdyOGNsiQrqFesJgWMTHetOL28UnPgF84xvAddcBR44EvuR2xNgWJRJWSCwWZSxZXJT7Q0OdcQwIaYRG/KIge6QLp27ZFb9f1An+9NmzwBNPSD3TiYnQl92OmNqjvj4J8pidtQvHOoZUKhKlmE6LvUomxYfUudjiok0P17TnejQSWdhIVCPTp8l2Y24O+Na3RBd50Ys25CNvR0zt1lYQSxHRGPOA53mvBnAngKdXw0EfB5CEhKH+AoAZAP6D/Bue581iLR8D8CMA3ul53ucA3A3gHIApSL76zQA+YYzZsI49vb3AC18IPPywDFQXXCCRBcePixj49NOS7nzwoE1zVkFxeVkGKkBqB+3aJQq8Ot2LizLInThhHZXe3rXCokZxadShkxoVSKUSLjQWixLJFbRyyqYw8aPeKnkjzUmCVsnVCe6EY10oyACUz8vvmpqS36uUyxIhrCUEgugGW0RIp
7Pe5iRql4KEwjjw0EPAhz4kWQy9vcBnP7v2Nd1gi5JJYGxMfCJNH8znxRa7PgshW4EuuNfzi6LKrrgLp36/KA5izsqKpDH390uN989/Pvh1cbdHg4NyPBcXbZmpfF7GjL4+uX/2rNwfGZGFjkpF9o+Wk1paspk0Wtu+VRqJatzI9GlGNZJOp1AAvvpVsZ/f930b4x/E3W61m9i6ZMaYf/A870EAbwLwfwP4LwAqAJ4G8Jerm583hnzctwB8EkAfgP+w+ropAMsAvgvgdQD+50b+fkAGlGuvBb7zHXGa9+yRVb3xcYmAOnVKVvuOHweuuALYt08M+diYTCRKJQmlP3FCNq3ftn+/pERXqyLsqaioEYuKpkHrNjgY7cS4g2EYxkSnT4c1hdFJXr306Tg4WZ1OvfSZZlbJ/Sk0nbBKXo9iUa6b5WX5vWGF/I8fl31x2WXRzlM32CJCtoKtak4SJ2ZmpP7hrl3A7/4u8Gd/Fv7abrBFyaT4MXNz4isMDNhocaY3k80irDmJ6xfVWzh1xUE3grBTFk7XS6Ui3YvLZeCmm+qLYnG2R4mE2JtyWURBFQuzWbFHmt21tCRp3f398vfBQdmqVXnfyorMw7JZ+UxXUNzoc2Kz06cZ1Ug6BWMkAnFpCfj+7w8P9Gjts+Nrt9qNZ8KW8DsAz/NuB3AbZIfDGHMu8g1txPO8MYgy/SCAY8aYWxp4W+DOzuWAxx6Ti+HAARmMpqdFWFxZkdWulRUZmK65RiYT5bIMFMPDMhgtLYkwog1YMhlxxEdH10ZXabSibto0IpGQ71BRUbtGbwYqLIZFNZZKjTWFCRIat2u0gr85SZBYWG+VPMgBbrY5SadSLttrJJGQ83t4OPj/OndOhPm9eyVC8fbbb8cdd9yBmdVipJOTkx2zNzbSFhGyUWxEc5KobTtQKkkH5mefBf74j4EXvGB72CIdy+bm5FiPjMiiT7Eovg3Tm0mj+MuuhC2i1mtOEuQTtbPsSifw6KPAM8/IPOTgQXnOtUdTU1NTMZ+nrTkLlpdlfmWMzDEGB62QqCnNuZytVTswIH6lK+QZYwXFfF7GQY1o1MYsnWbP6kU16ubHXdwLawizna4Zsjk8/rgsaFxzDXD99YHXT+QV1YX60pYQF7llBgA8z+sxxgQkzG4Jj8IWwjy2ng8aGJD6h088IU1WLrpI0pvHxkRMHB6WwWdmRpT3iQlJhR4ft072wABw4YVinOfnZTtzRlbIenutoDg4KO8bH7ffn8/XiorT0yKgACLK+dOg66U9N0Ij6dPVanT69HZqChPVnESf74TmJJ2I1rBRJ29kRLYwRyaflyjgoaG1RcOn7BOpbrRFhNQjzs1J4sYHPgB897vAb/2W+Agu3WyLNLprdFR8mYUF8VnyeZmwa/fmbh+7SDQaOVUvgtCP2qCwsivtak4SJ06elLrt+/fLHMXPqj2a6bZ5Wn+/2JtCQc6pbNYGbywsyILz2JjMrRYX5e+5nPiPQ0M2Ul4FQ2Pks7SO4vKy/F07Pff1dYbIxqYwpFM5fRp45BHJ3rz66nWfJ12tL202nR6JeDEAt9fOl0yH/GDP824BoIk188aY+xt4W+RvX1iQWohLSyIIalrz3JxEJeqK6vHjMljt3i0pBYOD8p5q1dZvGxy04sn8vLy+WrWO+ehoeBSWMTYNWrdczg4EfX1rm7Zs1aCn6dNREY1RTWHqCY2bnZ67Ec1JolbKO8EZ2SqqVXv+VqtyntabeFarcg2Wy1JCQF97+PBhHD58+PnXvfzlL090sy0i25ONaE6y3aMHN4ovflG6Mf/ETwC/+Zvb0xZpHa+FBRnLxsdtNoUx4udsVrYE2Vo2ojlJmG8Uh7IrncTCAnDPPeIXv+Qltdeca49e/vKXvxzxnqcF/u5KReZRxlifemREzr/ZWVuzVdOfFxZEHNQml0HlchRXUNRAAFdQ7IZzNSyqsVKx95k+TRohmxXfyBjgR34kMo25XiRit+lLW0JHi4hdSN2dPTcHHD4sF8r+/SIUzs/LCvzsrIiF6bQY3SefFHHvwguB7/keCYlfWBCnO5mUAU2LkVerVlBcWBDjnUzKQDg6Gh2ZBcjrl5ZsbcXFRRn8ADH0bhr08LCs3nXSpLFcDhYX3ceNNIUJS58O+l/rNSfR5/1oFEZY+gxXyaMxxp6rlYpEGYyONhZBe+KErCxffHHdGhtx3/s0/NuMsJTiVtKL49icJE488wzwuteJHXrPe7a3LdKMhMVF23gFsKVYmN4cL+o1J9Hnw8qu1POLKCpsLMUi8O1vy/V2003SBTWCuF+FobaoUJB90Nsr56jniV3W7K/lZZmbjY3JuVgsig+az9u5lltaKgjNsMrnbZZVb68VFLs58rrV9GmAUY3bhXJZGqmcOSONVPbvj3w5j3gboIjYXhra2efPA0eOiEC4d69cKEtLMkgVCpLWXCjIYJ7LSYfnQgG4/HIRE3t77UoYIAPXyIgtSK4iy9ycDH7lsh0Qx8YaTxPSQdXddCUtlbKRXyos9va2sMfaiEY9+MVFv+io0YPu5k/R04FLu/FpUxiukm8u2aw9pzMZOZ8b7Yi3uCgC/tSUXHd1iPsARcPfJWxEcxJGD3YOi4vAa18r4/f73hecOugj7kepri3StLjFRRkjx8bk3FxeFpuvk/RunmTHgaDmJEERhX7c5iRRfhHtUXupViVt8PhxSRu86KK6xyDuRyjSFuVyYnP6+2X+o/VaEwl5fn5enhsft35nPi/zsWJR5gGN1povlWwdRa1br529+/o2pqxUHGlEaGwlqpH2pfO5/34Jnrr+einvUmfBiEezDVBEbC8N7+yZGRm483lgxw6pkVgoyCAFiAh49qw4VwcOyGsfflgM6NVXy4qhioma6tzba7uHqbHU1GUVFItFG1k4NtZ4BJd+1vJyraiYzVqDnsmsTYPuNMHMbU4StlKuq4TlsuwvN7VGV9PceiLqDPf0yD6ISp/mJKg1lpdtN8/eXjl3m0lzK5WAp56SY1CvG/MqcR+gaPhjAJuTbC+qVeCtbwXuuw+47TbgpS9tKLIq7keyri3S814jErXOs+fZdGemN28u9fwiTU30o/5PWAQhowc7l8OHpSbr/v0yaW9gLtDVtsgYsTXlsu0arynLiYQ8PzsrNkkzwZSVFbvArfar0QXuSsWmPGsGmHYD7+vr/ACNdrOepjBMn+5MDh+WiOh9+4AXv7ihayfutigWUERsL03t7NOnZVtZkajDCy6Q58+ft92Zp6dFJJyYkCKjjzwiXYtSKVHrX/QiGfizWbsapoOepjq7qBijKdSADJYqKDY66CnVqvw+V1hcWZG/aUMYV1gcGNi8yW3QKrnfKW50ldy/Wu5fxdLIiag6jY00hQmr2UgBQMjn5XwtFGS/+LuRN8qzz4qYfvnlDZ/jcT8CNPxbyEY0JwlKK6Y4GG8++lHgzjuBX/kV4Od/nrbIRcVzFRLTaVkU9Txb/7ZYtHWheR00hrtwGhVB6MdtThIVRcjjEE9mZoCHHhJ/6rrrRKBvgLgf7bq2SGseJpMSFLC8LPfV5hhj
05t7eyUq0Q2WyOVsqZ1MRmxYMyJgpWIjFAsF+T5tnKiCIq+5xmg1qpHp0+3l/Hng61+Xc/ulL5V5XgPwCLQBiojtpemdfeqUDObFogw2F1wgF9LsrK3BsbIiXZ0BqaHU3w/ce680iMhkgBtvFCcgmbSh9bmcvF5FvKDV+3xeBsO5OZsa3dcnF3CzkV4uxeLa+orqpLq1HHXTNOww/M1JwlbKO605SVhTGL/oGPS7VUzcyqYwW0mxKOflyor8n9p5vJWB++xZuc727xcxvkHiPkDR8G8STC8mrXDvvRKF+JKXAG9+c906iC5xPysatkU6qSsUxIfQSbjipjcPD2/flD+lEb+okeYkQX5RN/sX251cDnjwQZkDXHedZEM1yLawRfm82BltfJLLia1xa7O66c1jY7XzGGPk/dr0r7+/tXIM1aoVFPN52/glk5HflsnQZ1gvblSj2whmk7f5jQAAIABJREFUvenTjGpsjHwe+NrX5Fp5yUsaKjWl8MxvAxQR20vTO9sY4ORJ2wFsYECExEzGpiEnEjIIHTsmrxseloiqXA64+255fnBQ6iVecYUNu19cFEe8UhHhSbuIBRm2YtEKitmsPJdOW0Gxlegvl6A0aDXQqZT8f/398j06GG+X5iRhTWHc+2FNYep1n47LPlDKZTkHczn5/7TmZqv/w/IycOiQfM6FFzb11hjttUBo+FsgKq24lfRiN5KQbF9OnQL+638VO/Sud0lDtSbOibifPU3ZIs0WcCfyruCqtRMrFfFn+vs38qd2Bv6FUzYnIRtFuSylkc6eBa68UuYbTZwP28YWLS3JYsbQkDzOZsWndm2Rm96szSddu66ZWktLthzD8HBrAr0xtYKillVyBUVe15sDm8JsDtWqdIU/cUIyKy+5pCmhfZvutfZCEbG9tLSzjZGah/PzclH19UkdxL4+GZzOnZPBanhYBo9nnpHHBw6IA3DqFPDNb0pHo7ExCQe+5BL72boiViiIwdJIwLBVfK1BND8v7zPGppKOjjbeKTGo4Lbe185mKihmsza92vPkO/T7tLu0imLbcZXcbQoTlT7tv9xVbK2XPr3VzkelIuebCtjDw/U7itejWpU6iMaI6N7kORP3AYqG32E96cVhKcWMHiSNUCgAb3iDOMrvfKfUNKYtinixU3d4eVkWlPr77WQesJPzQkEWO4eGtn4MaxQ2JyFbhTHSuODIEZk/HDzYdAmjuJ9dTUVFay3W4WG5JrNZ8ZtdW6R1FHO54PRmQN6rcx2d36zHZmm0toqK2vwxnbZpz3Gxh93ERjaFSSa7X2h87DHgiSekodNVVzW9INiFe6TzoIjYXlre2dWqRBRqo5KeHhnk+/vlsUZnaT2g554T0bC/XxpFjI5K3be775bX7twJfO/3SpFSJZ+XgSyXk8/UEPuotOVKxQqKCwvyOxMJWVEbGpLP8DvFra6SA/LbVPRcWLB1BV3xUzcWWK9Fax/VS58OWjFzu0y3symMOmqLi/J4cFDO5Y0QiTVy99JLW4qkjfsAtW0Mf5gg6EYSBsH0YrLZGAP89/8OfPGLwJveBPzAD7Q0bsX9LGwpQ0OFRO2YOji41o53WnpzvcYkYenFboO2sAjC7bZwSjae48elDNLwsMwb3FIBDbKtbFGxKPYllRLbUyrZeZi/huTKisy9gtKbFa23uLxsa9e3WqbH/zu1MYtmLbmCIm1H58CmMMKJE8ADD8i8/rrrmio1pcTdFsUCiojtZV07u1IR4WNlxda+2L/fDlZuevP4uAxETz8t4uCePVIvMZGQlcZvfUsGvwMHJDLRrXmiq2KaEqQh+rq6Fub8uinPWutDV9VGRmTgTKfDV8pbWSXXGo9aY1E7UQP2d7vbVk8i4kCrTWESifpCY6NNYYyxQnG1Kg5aM53C6zE3Bxw9CuzaJVsLxH2A6grDH5ZSzOYkJA7cdRfwF38B/NRPAb/0Sw0XDPcT97O1JVuk134iIWNFPm8XLl3akd4c1JwkSCz0E9acxP8c7RHZbObmJPLHGBEQ6Rc1xvKyzMn6+2V+o9F/mczaRY166c1KqWSbWyaTMn9ab8ko97NVUFQ/vrfXCoqbEQxANp5ubgqzuCjZk+UycMMNol+08JvibotiAUXE9rLunV0ui/jhpqbu22cFvlJJOhmVSnYV68gRUfV7esQ5mJwUB/eRR4D775eB6rLLpAHL4KB1ftX5np2VAUdTqYeGasUcvxCotysrNhVZw+mHh20K8mYMVm56tm7aRAaw9ZO0ll5YDUgSjdsUJkxobKQpjF9oTKVs451yWY7X2Fhz3evqUSyKkN7XJ+k6LQ6YcR+gOtrwszkJ6Xa++11poHLllcDv/I4s5NEWNYdOlJJJmXQXCjK2+6N8dFGqULD1nxsd99UfiqpBWK85SVQNQkK2mnwe+M535Bo5eFDmFC3659vOFhlj/dXBQfFll5dln/b3r40sV1ukqc9B6c1KoSB2rViU4zEysrGLIOWyFRSLRXmup8cKigy6iDdaO3cj0qfbFdVYKomAODcHXHONBDq1OP+Luy2KBRQR28uG7OxSSYRErYtTqcigrwV9/enN4+MyaH33u3KrXZ5V6Hv4YeDRR8WgXHopcO21MlC5q+SVigyMxaJNVx4fb7ypRTYrg6E6+kBtXcONFIn8lMsyyLvCov4Gz5P/RUVFTYOm0LAxqBgdJTTqebyyYp2xTEbC1wcHwyMaW3FwjJFGKoWC1EFcx3kX9zNkywz/RjcncaMJCYkDc3PSSMUY4O1vl/F4HRO2uJ/5687QAMQG6IR7dDS4lpsubOqCpjaZi4oiDLJH9VKL2ZyExIVqVfz/c+dkHrF//7qEqm1pizR7y/MkYjCVkvlXoVDbDNJlZUXsFSD2KqqMxcqKLd/U2xu8ULJeKhUrKOr8KJm0gmKTtTFJTOikpjDGSGDTiROymHHBBbWNipok7rYoFlBEbC8btrMLBUltrlSs8Z+aEmFOnd+lJXEMjLHRXNPTcoGmUlKsdM8embwUiyImPvWUPH7hC4Gbblo7cESlOjfqNGttkPl5uQ+I0zI2JoPpRg+OQRQKtaKi/j+A7Bt/GvRmipzbneVl6QSoTVMGBuw5qUJj0GTO84KjGv1Co3teTk9LrdALL2w5dfD5r1/Xu7eeDTf862lOEpVWzOhB0k2Uy8Dv/z7w+OPA294mq+3++llNEverY122yK2P6HniWxSLNsvALwrm8/KaUkkmx65gwuYkZLvxzDMyJxgfB/buban2mEvcr4yWbVGhYJunZDJiK5aWrC0KEuEqFcn0KhbF7x0ZibYtuZzMVXShfWRkc+Ym1WqtoKhlI1xBkTZwe7HepjD+RjBBQuOhQ5IlNjUl5dfcMmstwDO0DVBErIPneQPGmFz9VzZEwztba+xEFeHO5YCTJ+2FmM/LxTc5aR1fwHYQGx0VR6Fclot1fl4Gocsvt470woLUS3zqKRkobrxRipr6UxuMEfFnYUG+V2sfNiu4FQpWUNS040zGCoqbUb8oCP1/tA6f22BGf5MrKg4NMRVpvRSLcuxXVuT8Gh0Nn0y
7TWGiohqjmsIUi9KpfGpKRERXbGwhdWdLBqgNtEdNd0St16Ak+PcyvZgQl7/+a+CTnwRe8xrgh35o3ZN2YJvZoiC/yC2fUSrJuFKt1tZBdpuTaBZGpSKT4vFxeR3HdLKdOH1aRMTeXhEQW6w95rKtbJGfbNZGH+oC9tKS2KShoeC5kZve3NMjtijKH3VLNlWrtgHmZtUyNMbWeXTr8WcyYjszGfpwRFhPU5jz56XEWiYjmoQGOK0jfbrjz8oN1pe2hI4XET3P+xUAHwbw4wBeCuBXAYwB+AaA1xhjjnme91oAbwCwH8BjAH7dGPPA6vsvAPBmAD8E4ILVj30IwLuNMZ/zfde/AzgI4IcB/CmAWwA8COAfAPxPALcYY77pe8/3A/j31d/yv+r8OwZY2604yCnWqLja71q7Sl4oiCPQ3y8XXD4vBZHdiYkxItJpDQ4VGdWBqFYlbHj/fnuhnjsnnZyPHJEB8eabpcV60IWsUX3aOVrrDmpKdKNoMeG5OftZvb0iLo2Nye9o52BVqaxNg87n5W+atqCiotYq4WBaHz3OuZycT6Oj4mBtxL7TpjB+cXFlRZoMVaviLPvPY7cpTFT69Ec+ciduvfVWIP726HnDH5VW3Ex6cVAkISHE8rWvAe96l4iHr361LGi0KlzdeWd32aJGmpNUKsER6RrlkEyKzdbJu+dJNEPYRNef3syMA7JdWFqSaOhyWXz/3btbP/+7zRa1igqC2gxQmyJp9GBUc0eNkNbMsaj0Zv0unZ8YYyMZN3MhxBiZ76mgqBHgrqDIMg6kHn5RMZuVOoilEnDJJeIXBZ3/jaZPGwN4Xn0Rscv0pS0hTiLiQwDyAD4OYA+A34Ic0L8H8Murr+kH8LsA5gBcYowpeZ73swDeAeDTAI4AGAXwSgBXA3i5MeZLznf9O4BrAeQAfAHAvQCqAP4JwDSAjxpjft33+/4WwC8A2G2MWYz6Xz71KZjrr4+usRNVayfMOGezEpGYTstrslm5CKemal+3vCyh854nq119fSKwPPOMpJMODMgKgFuD4ORJEROnp0XseclLwptRuKJbuWzTgluJ2iuXJSJQOz0bY6PVxsY2TnRqlmJxbRq0dl1MJtemQbOOiKVSEfFQJ3dah7IdTsdzz8mxuuwycXSCuk276dNu4yKXf/zHO/He994KxNwe/fmfw+zda1NT/KvYQWIgBUJCWmd6GviTPxFR61WvknGslfFBF0o+85k78f73x98WHTsGo2VZ/Ghzkno1CN19A8hzlYqdlEc1L1Bfo1IRH2ijuqAS0qmUStJIZXlZxMMdO9ZVewwf+tCd+LVfi78tWlmBWW9kXakkczCtJ6i+lZZMGh4OjxpsNr1Z37O0ZMsBadfndvjVrqCottcVFBnZTepRqQD33CNj8MGDYovGxlpPnwaA978feMc7mhIRY223tpI4NXOvAHiZMaYMAJ7nJSEK8BiAF2hIqOd5cwD+EqIs/zOAfzXGfNL9IM/z/hxy0vwOgC+hljEA7zPGvNv3nk8D+HnP815vjCmsPtcP4GcBfLqRA3zkiDis3//9Yuhdx3g9g9bgoIT+njxpB56ZGbnIdu60r+vvl5XG8+cl0nBoSF571VXyuqefBh58UCK1Lr5Yft/evcDP/ZyIMHffDXzuc3KRv/Sl0jXJJZkUkW9kxKYGz86KI6/NSxpd6UylJJpyYsLWYZybk887d06+a2TEfl+7Vr80knNyUh4bIwOoKyoeO2aNWzq9Ng16s9IOOpVq1aaIAzZys10Oxrlz8v0qmgFyHOudi/706cVFqRu6SqztUTYLPPusDNYavaMFwPv6bDQPIWT95PPA3/yNRKH8zM+E18jyo1kLpZK9VQf68ceff1msbZF2Aw2qQdisDVLxsFq1/sjcnGxq6/ykUiIyLi1JdHyp1L5JOCHtxhjx9bWOuvqmraANF//1X+1TiLEtqlbFVq9HSOzpkffn82KzNYNsaMjOEcKExGRS5haa2VUs1k9vVjs3OCjvUzs2NLT5wRbptGyjozbjR2veA+JHah3F7TbvIY3x2GMSXKLNYbVWfSMpzEHp0x/9qIiSTRJru7WVxOmy/pAe4FW+CTnIf+fLKddw0EsAwBizrH/wPC8DYACSK//vEIU3iA8EPPe3AH4RwE8B+MfV534GwBCAOxv5B374h0WE+Na3gFtusULURjA0JGnMp0/L/fFxEQurVVlpVFIpEQE1IqxQsGLdTTcBhw+LGHnunERtaVr0RRdJHbknn5Tf/5nPSArES19aK1QCNtV3YMBG7i0tyabFgJtJ/U0mZQKgqxNLS7bT8+ysGBo1PqOj7V398jz5X/r7Zf8DNjxbHYaFBRF1FTcNenhYBv9ujOzS1I6FBdkng4NyfNrpTOTzUgdxeHhtZG49dCLb1ye//+Mfr4mWibU9es1rJDJKJ/DVqp1Ay2+T5/U67u9vT8MjQroNY4B3vEMmlG99K3D11TL2+22+1vPThQttKKWkUra8wuc+V3M9xtoWuf7JRpBI2AlFKiV+gy5mjo8HT0zcdOalJXk905tJN3L0qPhkw8MytrdSk1Ubb+TzwP/+35LNtEqsbZGKf4XC+vydvj6x4/m8tTeambWwIDZmeDh4rqJZOum02KyzZxurD6+LIUND8h0LCzIP0eO82XMMXZgfGZH/XQVF/S3qa/b1had0k+3F0aMyP5uclHM3yC+KQkuZ6HX0pS8Bn/60lItpkljbra0kTiLiUd/j+dXbYyHPjwOA53m9AN4KCUm9wPfaoGDYWWPMfMDzX179Db8Me5BfDeD46t/qct11Yszvu09qI117rUT8bdSK9+ioDO5nz4ohn5gQIdEYiVRUPE8c60xGnOUzZ2x686WXiij41FPAo4+K4HjwoAwOngdceaWIi489Bnz728AnPiF/f8lL5DP9aOServQvLMj36cpc2EAaRiIh/9vIiERCZrO2juL8vPzGoSErKG7FYKWipru6q5Fsup07JyKOvl73hUbpxVmw0cLP8/MiuvX3y7Fo94SsWpXo30RibdRsMxgD/OM/ioD+whcC//ZvAGJujyYn5f+am7NRiBddJKJFLifRBbmc2A8VwJNJKyjqLZ1BQqL5p3+SKP5XvAK44goZJz1PrrWwEgqJhFyXmj3gdpn/xjdkbL7iCuCuuwDE3BZtNFpuQeu5qpDoRiSG+VzaVXVxUcYvpjeTbuL8efE7tVGg1kdvFM28yefl/n33SUDBFVfInAAxt0XJpIh3hYKNSGwVjQzM58WOV6t2bqDzgKgsqkxG5l+6AFIoiB9dT2Tp6ZHjWijYklBLSzZ4ox1oDfHhYRnnVFDU/1sX5zXrhWw/5uaAJ56wATja3KxVHn9c0pgvvxx485ubfnus7dZWEicRMaBiTuTzamrfD+C/APgfkGKZs6vvuRWi/PpZCfowY4zxPO9OAG/xPG8HgF5IMc0/MsYE9BsK+EGeFA1Np6UL0SOPiHG/8sqNE43Gx2WwOndO7k9N2dTmvXtrB6C+PhEMNb1ZI8WGh4EXvUjSco8dk4v9kktspF0yKYLolVcCDz
227lE9C0iupmIHES0nYgu1jTtwVm9uBxwucCmBwL4vasLxsTvR6bXrl2IEKxZAx3FmTKM+kYrTBwaabRChIVr6VIQiayRaNYpcjqRXdjQAKNx770wjpxx1tQkImv6MuolS0rfeIHLm/1+LFRc2mT2XthsiFi3tkL/8eBBEJOrVpV+8V2+HPdg715k3dxzD8bmrLOmdr42i1QKmpQ+H8YlH958E0TmqlXY9CsKFs2ODiwq0Sh+HxrC/eRIYraxDYWg91FTg5L/QAD32kpDlkxIEsqtd+wgOvdckJ3FZAFXoi0qV3AGIhOJXu9UInE6NXk448flwsads2+ms9teFVMxOIjo84oVRJ/9LF7TNNwPh6NwIGF4WGh5vfe9xa2l4+NE//zP+P+tt4qMlELEgRGoKmzR88+jvPGKK4qf31V7VBy83vTy5kTCmg+QDUYqPbgkmcm+2lqsWePj+GyxxLWeUCRKJxSZiNMTikzCNTVhDY9G8T6zWl+qKuy6vpTW6nzPVsqsz1ZUVZGZRYRrfvJJZNgdPoxx/MAHULKcz7eRZfhwLhcC/awdPjaGvxfaeI+PI4O5oQEbbY8HY6iq5rUmmSxQFNEMUJbTiUMi3Je6OqyXRn0bTYMO5OOPwx9/3/uKD6BVsi3SE9CcVMHzi4memdQHNAJupsIEPd8/nw/zJBoVMlBchWa00QoRnldNw1zjLtAzjWzlzc3N5e2jcdOjhgahHc+lw7yHzVaynAlVxb5I05CowvfX78f9LSSpEI/DFqVS8GkaG7HGpVLYo5nJvJdlkJF79xJ9+ctIXEmlsHaWIhHq2WfRvG7RIjRp6ews/piMSrZLswWbpmmzfQ1FwWazfZKIfklEizj9tIyRd7BVVZTN+nxwbjiyMjyMDXNtLciTRYtmbqGSZVyblUiVJIHYk2WUa5tdXDQNi0JfHzZURCB4Vq+e+t5UCkSiouBcZklLo+DyZl4AiiGaRkdBEHNZb3f39JDEsowo+6uvwplYuRLlcVY2vJqGa47HQQzmW6RHRiAY3tKCRiouF+ZyMgmynMeO7zOXjNntIruQF7BoFBmCDgcyA5g46OwsfsxSKXRp3b0bzWre9raCWZFTnr65ZIvKFZlaUVz65nBM3/OeCUURjZ98Pti0cts0zEUkEihf9vvhRHIjFG6o1dqa3xaHw0T/+Z/4//XXF1fyNDmJEp1gkOib34Sd0jTYuWI32IpC9JvfwNa9611EF19ccH5l/WsF2aOKs0Usb+ByFZ53RmE2A4hI6N/F41jLa2unh8RgXVrOviKaSihyNjDrgRaqKmDSgUtqOTA0E0FyTUMW8b33Ej34IGzD0qUgDjdvxnXoyaLMa9I0bJLDYeh4cwCYiYC2tvy+bjwO4nBkBNquixcLkqemxtx80jTMAUmCTeNx1Zewcva+eVkWyOI88wx8rssvL3h/jitbxNlimXIEmd3VywGsf6mqogKOCDYkHBalyfxeWU7PXDSCaDS9WmS2IEmCQCvn8mYGj3ckAn9GlkVyhJE98+7dSOo4+WShtx+JgAhsasofXFUUkH47d8Kn2bAB82FiAp8zo1+oKET/9m/Yq3/+80Qnnog5MX9+afzzxx9H1+ilS+ELFkhgKfj0VZBdKlvMhUzELoLh98/2hRQLjqSEQtgYe73IWuNuRrEYsrVeeQUR0PXrC2eBlQL6smZ2ULPp+GSDyyUyEvv74SwZzbiLxWBQJQnG9AMfQEbdjh1wttatS1/g3G6RkXjkCEi56YiIcQaSvnuz1UVq/nwY6UOHcM1jYyDmSpHNoofTiQVm/Xo0otm+HUTgunVEp51mbpxGRrAwLF6cn0CMRCDQ63ZD64dLwhMJONl6R9lmw7yoq8O9DQaxkHGJotuN8n6bDZG2YBCfa28vftORSEBj7cABlDeee65lB2jO2KJyhdsNR4WJZodDRNk5Q3G6US1vnnloGspX+vsRMWcCkTNtWHIiFzhIkEggmFHMpiISQamO3w/R8OZmrI3NzcXPAVkm+tWvsMZddhnRO95R1OGq9mia0NiItW98HEExLk0rBnq/qlClB8PpxBro92O9JMJ1cXllqWySwyEyBLlEWJLE88cZWI2NQh+NSa1M/5C7AMdi+D9rPM6E/dQ0aGndcw9kEWw2SAVcfjl8I30pdLYyaF5zjh6FD7J8ufBnEwkhrZLPn1IUoh/+EP7e178OP4p19lir0sz3GRrCtXg8ouGC2w0fsqbGevYPN3R68UXoxl5ySVGk2Jy0RZwdzLqlDP7daOfzmYDNlu4rMWHPDZtYh5NL2zOr0IygtlaUNvP5ZgMul+jeHAqJxpjl5qNllizX1MCXTKXwTPv9hbVVBwbws3y5IBCZRPV68xOImkb0619jP/jpT4NATCbxWbMNUDSN6Kc/BYH4yU+i0WcwCE6jFATigw8iuLpyJTrYl4j7mJN2aSZRsZmINpttIRFdRkS3ENEhTdPOmeVLMgLDg82OmMsFZ4A7hBLB2A8M4D2trXB+SlHOmQ/c0U/f5YtJxHwdmxmKAiIxmUR6c77Sm0RCdER2uWDI2Aglk8imGxwESbhu3dRjyTI2m6kU3jNdGh2qinvCwsTFljYFAiCy4nFk1y1fPn0OSCwGncLdu3HNJ50Eo18o9T8aRXlyczNKo3NBkrAZHh3Fpr2rC4shCx8bma9c9jw+LlL1zz4b1ytJWDCLdVJiMYiaHz6MDfsZZxieL/8z4+e6LSo3cKaFpokyZu6c6PHMbCOqZBJzlAjzupxLZyoZ995L9POfE33sY9DjIoKdHxsT2WC5oKrQUNy/n+jKK0UjFitIJFDuc+gQMom6u/GakQYKhSBJyJTcswcBs/POM/zRtNW3Au1RxdoiRUFgjCUV5s0rPqhlxq9icEm/JGH94ooRJgnMNBUxe61MKPJGmLMSuQmMvryZyUNVFRlyM0GyxONoTrRlC3zDpiZk+F56aX5pF722Iv/f74cPNH8+ZBX4fo+MYIwLVUb88peQd7n+esi7cLdtrr4wgmQS32lwEOPJGUNM9BZLlqgqyrtfew0+1zveYXj+HLe2iJ8Ffu70mcWcmVgO4IxEzv7lucra5PX1Yv7oy7TN2LVwGDagrm72faJYDGSizYY92mxfD1G6pqZ+fmSOMVdnKYogBPX+bTAIqa+WFqJNm4SkwvAwztHVld8W3H8/0e23IwP76qtxHtZB7Ow0bkc0Dfu9++5DR+dLLsG+rb6+eG6Cm8vddRf2+9dfD9trwB7lfEcF2qWyRSWTiJcR0e+J6GUiuk7TtIOzfElGYGqwuauS3Q4HgfVW4nEYkkgEmnrJJB541g+cLqgqDBM7pvyaGSLxyBFc/4IFUyMkqZQoTXM6RXlO5nETCZSS9PfDeC5bNrVhiKLg78kkzjWd4xKJiPs0b15xi5SqgmwdGICT3dMjsm6mA7wI7d+P6960CSno2RweRcH7iJAtmWuB0TQY/L170cFv9Wrc22PHhLak0Q2NJCHzNhAAqRqJYH4sXow5XwxhFA4jsjUwAGf+lFNMzRM9iTjnbVG5gUX4OUOGCPNClovLvr
ACfXlzTY15Pasq8mP3bohnn3YaMv/YUWYNsLa2/M7uww+jwdS73010+unWr0OSkAW5eze0ftavF0LyxWa8JxJEP/sZyIkrr8TG3QQyN+6VZo8q2hYRiS7F3H2y2EAG+1VmyCBVxTUoCvxFm01I0bDW2XQ2f8gkFCUJzwc38ODGfE4nyMNSdbjOh8FBEIcPPYT1YtUq6Itu3mztHnHzAo8HGuWahp/RUTzDnZ2i0Ve2MugHHgCJeOmlRB//uJBxYR3EXPdGn70Zj+P9waBoVtDWVrr7qijI1Ny5E+N0/vmm1rPj3hZxFmJmZjEnYJRDNhwHYokEkaiqsGN2e7ofzAS6mfJsTUvPRp7JwG426Mub6+undz+YC5yII8vpAZ5C46ppsKOhEO6Dz4e9s6rCryGCzj2PcSCA97a15c8AfPVVSCmcfjrRl76EaxgZgR3r6DC3j73zTvxccgmqzkZHMa8Mkn15v/sf/wgS8aSTEERub7cW0Ej7Q+XZpbJFxZKIFQrTgy1JMAqaBlLN4xF6Bex4TExg8yFJyA5bu3b69MFYSFjvjHIJhZHFUVVBJEajiJI0Nwvtx2hUCHUX6vIXi4Fs42Yc8+Yhw0TvmPK5YrHspGUpkUrhPsgyzlPsIhWJgLALh8V3m84I2tgYylb6+uDwn3465pHeMe3rg+O6YkX++fXIIxC/vegiLG6qCgJRVXEfjDpRioJIeDgsxIKHh0U3Rk0T6f9mN/HBIDIQR0ZwnSecYLqEvNJpooo3/LIsOgvysxGLYd7MVHkcgzeDsRhsY2NjeWwWKh1+P0pXamqIfvxj8ZxzN8OWlvwZgK+8QvS3vyHD+F3vsn4drPWzdSvR5z6H4wWDwv4Ug3gc362/H07yqaeaPkTVFpUBkklBbJeivNmMX8XgTDlVxbPBXZ315ZalLnXOBt4wRyIg8rhaY+FC+KzTqRenaXhOt2yB/I3TCYmSK64A8VdM9+hdu+Drbdgg1pxQSFRY1NcLApgovQx6xw5oj512GoIidruQw6itnRr4YqKHf1g6qKYG16AoWGdKqfcmy2g89cYbkHZ561tNH6Jqi0jMf5Yl4I7O/Pt0P39GoKrwn4hEIyN+Zj2edB+fiS+zRGIoJHQJZyJoUOh6JifxzHk8M1ferChT7a8VMllVsRcKh/H/AwdgB84+W+xt43EQePX1+bsxHz6MZnALFxJ9+9sYj0AAPk1rq7nqvfvuQ2Dk7W9HSTTv0YrVqtc0yGE9+CB8oo9+1HSwpNJtUUWgSiLOLCwNtqLA+LH2ls+H18bHYfC5ZKS/Hzp3RMjOW716eognXgz1At5micSBAbEJ4+Mw+WbUSLCDGg5jMXS7EWnWk4V8rmgURs2MxoNZcCYA61kWK7KvadDeOXwY47NsGYjX6cxyGhxEdGtoCKTamWeCNPT7Qch2dgrdjWx49VWiv/4VjjI3BBgZweLW0WG85I+7jQUCyIz0+eCo19VhkVMUoZkoy3Bu2IkvNOZ+PwjEQAAZiGvW5F9wc6DSF6g5YfiTSdhFrxdzgPV4uNR5pjrZ66+nWt5cGsgyNtu9vUS33YbSYSKM8cQE7m8+4r+3FzqIPT3IiLY6FzQNJN/TT0Pr54ILYDtYaqQYexyJEP3oRwiyXHstou0WULVFZQLukluK8mYrjVaIsDZy4Fnvg7B4PxMC+syoUvsUsiwIMs6IZLvMXe6dTvhspSIUYzFkHW/ZAr+ppQUZfxdfXBq5nzffxL1du1b4mJKEZ9frTfeLMsugDx8m+sY34D9985sgDWUZtsznE34Rd6rWN5yx2+En+3wiiSAcFo3nSgVJQjbRm28ia/uMMywdpmqLdGD9UCIxz/Ulz4W67k43mEhk/UKbDc9RMon5xcSf1UYrTCRy9/hyKOnWlzc3N0+fbqPRkmWzUBTsjfbuxR5/xQqMLeuj2u2wM7ls6sQE0c03433f+x7GIBYT5KMZW/nYY/BfzjqL6KabcAxNK74Ts6qiPPqJJ0CSvu99hStOsqDSbVFFoEoiziwsDzZHUdjp4OyHWAzkCkd7fD5ksPX14YFbsQI/pY4CZS6MZhxe7kK9fz82UUuWoFTVCuEWCsGJs9kEmbhkCSIsbEQ1DURiJAIiywJhZAqlLG8mgkN54AA2Bo2NyEqcjoYxehw6hMzEiQksMu3tIDGXL8+9OB08iAYly5cjamS3YxwCAYyD0YwdTSN6/XXM6/Xrcb+GhzGOmansTBqFQhgnbtCSqR3CGB+HBkg0isykZcvy66nlQaUvUHPG8HN5FwuFc6mzzSY0E2cSrOUpSdXy5mLw85+jjOXmm1FWR4R7OzaG8Wxryz2uo6PoCt/cDOLPqg1mrZ+HHoLWz+WXIwjB+krFbAiCQZCj4+PQ+Vm3zvKhKn12zRlbxNCXN7e2Fjf/OHvJjA2RZZyfN8qZvhWTG/pOy6Xa4Mbj8FM5a47JCUnCnE8m04M+RKLLs5XO0keOgDj8+99x7nXr8Jyee27pSIuhIfjUixejmoJIbNoVJb/2mN9P9JWvYKy/+13cj2QSfiJn0SeTIiDG5BJ3uNXPnUgEY+jzlTYgzo2n+vqgj3bKKZYPVbVFmQfURIMSJu45O5glC6ZTZqAQFAX7JrtdEGqc7aZP6mBSnDMrjYLLpPl45UAkyjKeSy63LlV5c7aS5VI31xkZQXOozk7YI67gSyRwnq6u3Pv9RALNnIaHIc2yZAmulyv6zEhNPf880Q9+gCqxW27BXjGVMl8KnQlFge/33HOQUrj4YpRFW+AHKt0WVQSqJOLMoujBDodhNDwekQXBJSzBoHBaVRUC7YODeO/q1SBMSrVQZWu0UohI5KhUMCi0HWIxfJ+2Nuvaf5OTMIo+H4zk6CjGZuVKQSRpGsYiFMJ5LBJHhiFJ2ByWqryZCItHby+OuWQJFpDpdDw0DdGue+/FvDv5ZJS3ZMtE5E17YyPRpz6FOReP477U1eUXL8885549WNRWr8ZCOTQkBMvzLSSpFO5vOCxEoxsbBZE0PAwNxFQKJcxLlhi/riyo9AVqzhh+fTdALstRFFFabLHTdtHg7s3V8mbzeOopRMkvuwwlMgy/H5vt1tbcjnIkggYlqgpyrhjb+1//RXT33dhYX3ll9gwvK/D70W06FCL67GeLa/ZCVVtUluDyZu7cbXUeWmm0QiQ2ynZ7bsK7VKXOnEGnz2piW5z5vnAYY8Mlk/qMHSKcnwnFXP6NqkLLecsWVD84ndhwXnEFqlFKiXAYPklzc/qx/X48v+3tudeYRAJlg8eOgUBculQ0wWFNukRCEMXcGMXtFs0smLThDCpu4lcqJBII/g4OEr33vSjVLgJVW5QD+sYrTNrrM4O5zHU2Ao5MJDoceHa50sfpTC9t5cxas41WWMOTCHawHHwhTcM1RaN43opZ00tVslwI0SgqxWprkSnMTSaPHsX+Zt48kIjZegmoKtH3vw+JqFtuwX5O02CbOBBilOzctg1l0CtXotEc8xJtbcUluEgS0U9+AjmKiy5C1Udbm2USttJtUUWgSiLOLEoy2Jx55XSmR5mTSZA5qRQe5LY2v
G/XLmRv+HwoxVi8uDQLFTugrFHHr2U6vCwMOzkJY8Wt491uEc3lbDWrbdv9fkEe2mzIinM44PRxyRsbTNZ9mM6GJXw+v7905c1EMLK9vSAUfb6p5dulxuAgFqdYjGjfPji+PT0oc+ZIeDSKTbskYdPe1IR5MTgoImNG59u+fcga7ekByceR/s5O49m0vFHhZhdOJ37fsgXz8qKLcE1Fiv5W+gI1pww/Z8A4nSKaLklCM3G6SlYKgYltLuObreuoJPT1EX3hC7AB3/++cCC5U2FjY25HVZKQaTwyggzEri7r1/HXv2JzfcEFRNddh3NLEuxbMULxY2MgEONxohtuQHCvSFRtUZlCVZGhEYvB72lttRb4s6KPSCQ0tR0OrNf5GndkljobFf3Xl94yeVjoO8ZioukK66VxdqQkpX9ft1sQiuEwNLL++lf4Bq2tIPgvvti0prEhpFJoMOJwgFzj8efyv4aG3IQeb9pfeQVE4oYNGKvxcYwXZxryD2eoZZZCE+FzTCByWZ9ZUjkbYjFIu4yOoiP82rXFHY+qtqggeH5zAgaX+zMBZeS5mw5weT0TidzIh7OJ9e8zq49IJCo0iLCGz2YZtx7xeHrWthkfLVvJ8nQRwYoCAjGZRIkvBy5SKezRbDaR1cyBa72u5e23oxvzpz4FGSci+CLRaP5ASCbeeAMN7hYuRDYj9zRobi5uL5pKwS/asQMyFOeemz9YbACVbosqAlUScWZRssHmzs1s+PTZgFxCarOJMtLRUXSVDATw+/r1IGaKRbZGK3oikaOnkiTEbLMZ6eFhUTprdeM3Po6flhZEz/btw/kXLUonTo8dwzW1tFgnLc0gGsW4c0ZAKYgEvx8lzokExmvZstKXCYRCKGtubcWCkUpBi2PbNiyea9ei7OVPf8L9++QnUerDxLAk5U+tz0RvL7SDurtBIIyMYJ63t1sfs1gMWQR33YXxeec7MVYlINIrfYGac4ZfkjBf3G5B8iSTmLcez+x1COQofLW8uTCiUTQuiceJfvpTsUHnzG6PJ/emnTv57d2L0uPVq61fx6OPoqTm7LOJbrxRdIVvbCzOfg8Pw1GWZaLPfx52qASo9Nk052xRJsJhQeZZLW9m4sHs5juVEhqe3LW50Hm4/JIoe6mzpsH34IYfHg9smxmSU5KEXlpdXfqGN5NQ7OtDZ+OnnsJrJ56IkuVzzpm+8kiuiohG0XiNr09R4ENy84Bc43nHHQhcfuQjqODgRmAshWC0tJMJS6cTn9Nv2fRdoM1mh4XDuEa/H/ayyGxoRtUWGYA+K5Gzf4lmn0xkItHpxDMdiQgtfv2zrb9Gs8fXd4AuFyJRX95cV4dryzXuM1GynA07dsB/2LRJVNLxXktVsdey22GT2d/kfglPPYVKsYsvRld4ItwHv98c+XfwIAIizc2oFHE44JexVr1VJBJoXLd3L/QPTz8dxyvSZ690W1QRqJKIM4uSDjbr3qjq1M2NJCHKwF3xOCV4cBCOUSQCgnH9+uJLe3kx1EdgolEYKFkWpdeFOkaPjMAgNTVZbyAyMoIxaW2FoTt0CK81NiJrj43S8LAwoKUgUwtBkkCS8oJciuxBRQHpNjiI77ViRenKtGUZJKzTCedSv9jH44iuv/46FjZZRubQySfj7+PjcFDb2413Ce/rg6D3woVodDIxgWOY7RSWiYMHUZLo8SB70uXCMWtqREaTRSet0heoOWn4EwnMR/2GNh4Xr82mHo++vLlctIHKCZqG0phXXoHWDmsEahrWMlVF9nCujcejj6Ir/DvfCaFvq3juORB9GzeisQvrl9XWFmeLjh6FBqLdDntZTFJd4UQAACAASURBVJZkBqq2qAKQSgn96qYm8z6A1UYrRCLo7HYbbwaUWerMm2VuksLN9Xw+67aMJW64vLm+Xnw3zry5+24EL51OaKNefDF0l7nkebrsaF8fNuiZftXIiAjeZgZINQ3f5f770bH0vPMgg+DxCA1IfSOVQuAmUixTZLeLeaDPVswkFvWl0NnmSjCIzKRwGPrVS5eaHp5cqNoiE+Dni7PYHA5BUnHQgMnEmYIkwVZx4yN9ozh9dZmVRitE6URiY2P5BFQLlTdzYCMzuDITpdl9fSDYVq6E7WNMTMA3yZZoEYvh+2zfTvQf/wFi7tZbhX7iyAh8YqMVeUePQp/a40GGdV0djpFNq94MYjEc7+BBBDM2bixZsk2ZzKy5jSqJmAM2m+1jRHQ7ES3VNK2vRIct+WBzR2AWiM0s8wqFYGg0DYRZUxP+39+PtOREAtl469dbJ7Z4QdFfD0dczTrLY2OiTETfHMUMhoZgPNvb8Z1HR2Gg7HYYYS7DHR0F4dXYOP1dj4mEFg5rWs6bV5oFKBRCkxrWpFixovisq0OHsDitXJnbmN93HzJ/WltB/J1yCha4YBD33ajw99GjWCB5HrIofWNjceLh+/cjA7G5mejCC/F8zJ8v5AAkSZRTWdBpmbEFqlJsUTmAy+t4s8ZNn/TNV2ZTj6da3pwbd92FzJjPfAYligx27PM1qdq2Ddqtp54qusJbwWuvwaFdtYroH/9RZJGyvqpV9PWhi6HXCwIxX4d7C6jaogpBZnmzWR/Aqj4iEWxgKCSCukbB/h3rV6sq5nFDQ+myu+Nx+BtMkj3yCEqWR0fxrFx2GRqh1deLDEVu7sfkZikJxfFxBDU7O0VXeCLYApbeqa/Ha/qszHgc8kG33YZsyVtvhU+uaRg7h8O4ZlgqhetwOODX5SOONU0QikwuMph0ZmJxchJ2NpEguvpqVOqUEFVbZPaEmiiJ5bnMWveZDVlmyndJpXBNehkBtzt97lpttEIkspA5oFouRCJRenlzU5PQrpyJkuVsCASIXn4Ze5eNG8V5ualqY2Nue97Xh6ZOLS1E/+t/YS9UV4fPcRdnIwGp0VEEVBUFGYhtbdhncza21YzScBjHO3KE6JprsI9saSmZjnnBOzRNNuS4QjUXosLBJbLBIB5IRUkX8G5ogOEfH0fmXSQCY7R0KUqpDh5Extljj8GZWLfOvDAqb9RHR+GYcMlZfb1waowaGXaWhoehjbdokXlj3dGBc46M4Fjz5+Na9u1DFubChdDb46wWbku/YMH0LgxcxuLxYGFgIdxiiYSGBqS4Dwxg0QgEQOZZzbAcG8MCv3Bh7mvbvh0ZQ+9/P0qKXnoJWUCPP46I17nnGjvX8DAIxLY2EIi8gNfWFkcg7t5N9Je/4B6//e0glDo6sOg1NuKHycRAAI51bS3GskrsVC5Y1J91uljcv6YG9zseLyr7tGhwhDsUwk8qBdtUTk70bOC119D06LzzoIfDSCSw+a6ry00gHjqEgEZPD4gGq2O5Zw/Rv/4rSINbbsFxQiFsGIppzvLmm8gEqKtDafS8edaPVUVlw27HWsflzazrZ3TN4e7zVjo219TAzwmHMa+NzmkmELmqhJt+6Esaiy1LrKmB73LXXURPP43vt2kTpA3OOiv9+CxXweQLZ05xR2g9oWjFFsRi8Ivr6+EnMjib0+fDGsLrCQet7HYQxLffjiDEN74hxjwaxTGMVmbI
Mo5ltxvT0cyWEZaZrciZsL//PX6/6irhK5dLWenxCJsN85kz3bicmDMBufQ5lZo5MpGDA0wker1CX5r/ps+KNWuLXC48X+EwfsrJB+JA8+goqrx8PpB0nE08k0gmsdfy+aCpymPE9sHjyR3cDASI/uVf8PfvfAdjHgzCzrpcUyvMciEQQEfnRALH6+gQzS7b263bjslJHG9kBDqNy5djzzdbjRCrsIZqJmIO2Gw2BxG5iCiplW6QpnWwuUMSl6xkPtzRaHrHYO7Yl0pBX6+3FwvCsmXQkzLi2MoyjEEkggWFyR+jHZtzIRCA7kxtrbUuxJqGDLdoFOQgE5qHDoG4amiAo+fxgFzlLsJWSEsrmI7yZiI4tgcO4J5wh2qjjisRHOIDB3BNuUpc+voQyV6yBJFshwMO0Pbt6JSYSGAOnHkmsiJzjefYGEqim5sRYZMk3Ae3GwuV1fuwYwe0iBYsQMdGjwfHy6XNyFHRcBhzxOPB9y+gXTeTEfeKs0WzDdafcrkE+aSqooszZynOJqJR/Bzv5c2jo2gw0tKCMmJedxQFNoJ15LLdr7ExaP00NBBde631AEBvLzb9ra3oOlhbi3WBAz9WHeW9e4l+9jMQh1/4wvQ0f6CqLapIcKYZN+sx4wNYbbRCBJsTicAGcjZdNjB5KEmY/z5fui3NLHW2UnYpywg+3nMPAn9uNwKQF16IoIBR3TTOlmRSUd8chklFI/ZeUdBIRVGwaWfCRFXhTyYSuE96PTtujBKPI1tHkhCM4BJoHsPaWmPjw3aPCMco1bowNASCkwgl1m1t5sugDaBqi4o5eUZWop6gZ5JRr8M33eRvMonnyu3G3OckFf15rTZa4eNHIjh+Pls0U9CXLGsanl2WWmhpmVkfTdOQgRgKYS+lH5/hYVxnZ2f2a0omUUlx9CgIRN7LjY9jX+31wp6z/mOueRQOI6A6MiK6MbNWfUeHNW1fIux9v/Md7FOvuw777sbG4uRissBIJuJ02JDjClUScWYx7YPNoqqZnZsZqorNEb+nrU2QTIkENj2HD8OorFgBo5GNfFFVGIBwGL/X18MIMGmoX/ysEomTkyIStHixeYdZVZGdl0ggq44zLMfGsGm02fD9WlpEVkBtLQzaTERmp6u8mQjf5eBBjEF3tzFyVFVRAqyqIFizLU7j40S/+AWM/XXXiU378DDGubMT537hBby3rQ2ZBPqIPhHm4PbtOM4pp4hrttlwDKvjsHUr9IiWLsVmxOnEYmek7EpV4dCwKLHDgXmdg+Apk7ipZcx5w59KiaYqbMNkWXRxLoeIp768ua6uPK5pJpFKocxmcJDoxz8G8c9ggoX1fDMRjcIWpVKiK7wVDAwg0u7zwbFtboZ9UpTiNg47d4Lg7OhAE5Vp3CRVbVGFgv2xaBRraWursbWvGH1EIvhtsVh2nU9Fwd8484nJw2z+A5fRZhJ3hQiFQIDob39DBvHEBORkLruM6KKLcD1c3myzwa8026FTTyhyh2MmE/MRivv2we9ctw7PK4/F0aO45rY2YadrasQmOpXCpr2vDzakpwevJ5MiI96ID6IosHuqWnRn0jQcPYpMb48HjRU4GzqzDFqvr8hzS08uGiCJqraoBODmRkzO6degmSYTmUh0ufAssF/MKEYfkUhUG3g8JSeRDEHfmZ5tqj6TOZHAs8+SYDPlo+3dC3ty0knpVWXBIGxUa2v2qkFNQ5OSl1+GhuGmTXg9GsXel5NWWCbGZsNrmdmg8Ths2qFDCLCeeCJsUyQCO2i2YpExMgIbGY9Duqa9XZy/xKh0W1QRqJKIOZBZK2+z2e4gomuIqIOI/o2ILiEiBxE9SESf0TTNb+CwMzLYqRSMDFF6VqAeiQQyQLgbld55jUSglzgwAMdn9WpkJ7LobygEA6Sq+CzrRjCyNVqxquUTCsEB8npBRJkllxQF3yOZBBHJC0A8DoeRMxW7u3EuPWk5UyUesVh61kupFqlUCqV0Y2O4TytX5i9hGhiAQ9/Tk30xj8WI/vM/MZbXXy/KjbkcuLU1XSfowAGiF1/EuC5YgE6nHR2YO6+9hu+5aRPu6fAwFvHOTuuO8/PPE/397yC/3/pWkW5vJVrGZHwsht9ra6c0L5o17Z9KskWzjXgcNsDnE8+zJMH+ud3WI6mlhKqK0mavt7xKe6Yb//7veGa/+U2iM84Qr3OZU66GXLKMbOhjx4g+8QkEiaxgZAS6ZURwbNvbYc9SKdg3q5pvr76KrJ9Fi1CSadXhNoiqLapwRCLwAbh81UhGbbFEYigE+1hfj2dMVYUMBMtCsByEEWR2dXY4phIc+/Yh6/DJJ/HeU08luuIKSKBknkeWsQYrCp4fq89QNkJRn6HI1zc4CG2uhQux1sfjsAMsezJ/fu5mKv/2b/B1vvpVfBc+bzQqms8UgqqKSqESdCb9H/T3E/3ud7iGT3yicLAlswyax4wI90ifrZjFH6/aohKByS29JmJmBiATjfysTZffwFmINhv8f7YNmddqRR+RCM9aLIbjTvNa+T/gbOpMbdVsRKiiwD6nUmIvMJ0+2tAQKqq6u6ETyEgmsVeqrc3dRPO3v4We7Mc/Dn1oIlz30NDUJiiSBBsbjwt9+NpavP6tbyE7/Gtfg01jPdimJusB28FBou9+F+PO1Sd1daWrxMuAaU3EEtmQ4wrHaQFVUXiAiA4R0deIaBUR3UBEKSK6ajYvSg/W3QoEYPgyOzcT4fdFiwQBFI8Lsei6OqLTTgPptHs3MioOHACJ19AA4+vz5SYonU4YIY5eEVnX8mlowHUODCBDsrvbXLTL4cDn+/txjMWLxQJ44omiu3EohOy7hQvxe3+/texHK/D5cM/Gx/HDWZ3FLlJuNyLqLBK+fTvIvKVLp36vyUkQiO3t2QlEWUaX41AIjigTiLEYPltfnx5Jstkwnj090Bp7+WWiP/0JTjjPz5NPxr0cHcUi195ujUDUNOgoPfEE0dq10GhUFDj9VkkizjTgjQxLBbhcyNZ8+9utHbfEsGyLIpHpvbByAZekcCMDfqZSKYwBa3zNNpxO0YmTmwrN9fLmRx8leuAB6KquXy/mZCqFcfB6RXawHpqGJiq9vUTvfS8cWivz2e+Ho8wR99pakJKJBGwZZ7KaxdatRHfeCTt77bW43ul63p59FtlbZYCy94vKGaz5OTYGYjufWD7DZhN61JwFaAYNDSIoHI+LY7F/ZJaYdDimdpdl8uH557Gx3bcP/s6llyLzMF9TD6cTfgKvvSz9Yva6OJOrpkZkcUmS0DN0OGBz33hDBFmDQdFIQpLgF+aSWPnDH1B5cc01gkBk6Qwudy4ETYPNk2X44aVakw4exPU1NRF97GPGdDCZIMzMftOTi1zGzu+XJIyfPhA0i5gTtkivf8iaiA6HyJDjzsBMJiqKsSxgK2BNREXBOVkqhucIk8tMNJp9Rlk7lO2QGQkms8jWZblQNidLqoRCWMtTqekrbw6H0ZypuRkJPAwOMrBdzIZHH4Wdfec7id79bvE5bqTS1pY+N1wufC9OPGI+4Be/wL7/pptg06JR/I2ThqzgyBF
oINpsOG5dHe5zqQlETUMGZ5Ed5+eEDZkJzPFtyrTgOU3TbuRfbHgib7DZbP+gaVpo9i4rHU4nnJHJSfzU1U0lhzjzjbs1jY7CgLW1wbg0NYGQ6evDxujZZ2HYTjstf1t4XuB4cWNDa5VIZIHrI0cEkWiGbGIi8cgREIlLlsBJs9sh5trYCJJtxw5ksTFp2deH987EZt7pBInGJeLJJO5fKc7d2op7efgwsjrHx0UZNxEWkIEB0XwkE7xp7+8n+sAHhOMvSZg3XIqdDQ4HtIXWrEHzlS1bsHhfcIEgcpjAtpKBqWloCvTssyCFzzxTlECWokEKP0ctLbg3v/oVOsGWCYlYEbZoNpGt0QoR5qyq4jnL3DDNFmprYZc44juXy5vffBNZzRs3En34w+J1lsngBkjZ8MwzCEycd156lN4MQiE4tOEwMhEXLsSmP5GAHbQ67s8+S3T33QigfOIT05fpqmkoT7z33rIhEau2qEi4XMjEZ7mZZLJwebPepzIb8ORqEb9flOoWo//J4EYRExNY7++7D8/0woVEn/0smh8ZzTbiUjuXS2RrFtMVmolOXhNCIfhDu3bhHD09ePb5nMPDQvYnm7/6+ON43i+8ML2jPFcwGNHeZQIxlcrfgd4s9u1Do5q2NhCIxWR48bgRYa4NDqLM8fBh/PT2Ys7ef39JLr1YzClbZLdjTuhLbplA1Jfe8t95z1VqMtHjwfrIWYfRaHpnZbZFTCSaPbfPl04kltL30WumcsDF7TY3RiytwI0xR0dLX94sSdhfOJ3pnZiJRJChoyO7jd65E+Tfxo3wPfiznN3c3p57jXC7saePx4l+8ANkVXNQJJnEMbxe603henuJvv99jNWNN2IMa2qKa57J4KAGE+lvvIHg9Le/XdRh55QNmU6Uwdap4vAfGb8/TURfIKIlRLRr5i8nN+x2PKQcPWFR3Eyj6XYjQy0YhJM2MABn0u0WZdHnnAMDc/AgMtr6+5E90taW+9wcmdJ3f7PbxWtmnN7aWhB6/f2C3DPjSLpcIiPxyBF8nonI1lZs2Pftgw5FV5fISORzlUqbJh9sNtwvXqRGRkpX3ux0giCdPx+ZdDt3YlFZvhxjomkgZ7MtqE89hQYob3sb0Qkn4DXuxk2EYxZaiDnq9453YA4cOIAFb9EizC0rehiaRvTgg8hyPPVUkNvxeLrOZ6kQi4FAPHgQ5VdlAsu2aDa0Z2YTPh+cIe7sSSQ6bGpaernzbKOxUZQ357LZlYxQCGXM8+cjA1CfHRMICG24bPZ9xw4872ecgY27lXGJx9HAJRAg+qd/QrZ2Mol5MG+e9cj4o4+CMDnlFGQgTteakUzC0X/+eWQclAkqxi8qZ9hsmINeLzaNQ0OFA2x6n8qoDUsk8ByoKo7PtkaSiiOxNA0E/z33gOxXVTyr73kPgnxcGptMmtNyq6nB+0Mh+KRWyps1DeflDERunjA+Dl/opJNwDm4ow2V+uZoX7NqFpkknngh9aLZF2SQ08l2T34/ram4uTeCTr+0vf8G1X3ONdR+Sdbv7+sTPkSMiQ5ubM6xfj8B0mWBO2iLWmWcyTFFECbOeTOQqMH1mYinAAdlEQpyHdVX116g/t1nU1qY3wCv2eeDO1pw5y5mcxYyJ1wvfxe/HT6nKmzUN+7J4HOSd3g5HIhgT7hKdiaNH0cxp0SJoTPPemiWZWloKj6WmQSLmtdcQdDj/fARRJifx/Yzs87Jh3z5cW0MDMhDtdlyLFQKRS9D1xKFew3X7duxX29vNHzsDc9KGTAeqJKJ59Gf8Hvjvf3MkGM8uOHricAgiMVvnZiK8r7YW5Nm+fXg4589H5IMXimXLQDq98QacxPZ2bMKyGQQubeGy5syIlRmnlwgOS3d3OpFoxuF1u1GizBmJixeLxcTrRcZcXx9K2kIh/H10FK+ZzX4sBlzePDFR2vJmIhxn0yaMQX8/MoJqa1FanG3TvnMnyoRPOgmNShhjY3AkOzoKL8ipFKJrsoxMwYYGjPWjjyJC5ffj/Bs3Gh9jVcWGfds2aC1u3IjFct680muqjI4S/fSnuB8f/SjOVyaoKFs0m3C5MGdSKZF5yGUz0ahwhsuBrLPbYaNjMZF9M1fKm1UVGYCBANEPf5hOIMZiQqMtmy3q60OpzrJlRJdcYu1eJZPQ5Onrg37ZunVCF8jlMlbulwkOZtx/PwjEj398+mQwAgHoRx44QHT11elZnLOMqi0qITgrmStECpU3G/WpWOOPN/n19fiXyaJgEOcxm+mXTMJP2LJF+BRXXIGS5a4u8T59E4NEwlxXZ24WyOXNqRTGJd/31TSch7OMWUOS5UoGB4XuN/uwmibO4fHgd7YP/DM4iMyari6ir3xFPO/ZmnnlQyCA68ql/WoF27fjPixeTHTVVeZ85HgctrG/HxmG/f3whYkw/osXw/9ZuhSB9tFRrE8LFyIgXSaYs7bIbsd85WeIyXief5xhp9f702cmFgsm9vg5ZkJRP8f0JdZWzllfL54/m81aUEOfmUZkrGTZDBwOJCuEQrjWUpQ3HzqE52nt2vT9tCThGfN6swc4g0HoObvd6KSs1/0PBGCLjfg1v/sd0UMPQSLmyisxdgcPCgKW/VAz++Bdu4j+z/9BUPimmzAvWc7KSIa2nizkLFyG0ymySe12kIcvvYRs8g9+0Pg15sCctSGlxhzYlsw4lByvl8H2Mzfq6vCwBYMgQ5qbpxo81kXQNDg1zPJziRdHvLq7EfE4dAhk4xNPwIlYt25qhlM2fUTWzLBCJNbUTCUSzUSrPB5c68CAIBJ5AbbbsUHl8ub9+0WZG5dRz5R+mtMJApf1+EpZ3my347v4fCjHicdBKtbUpI/lkSPIKOjuRiYBG/1QCAu8kVR+WYZTG4+DKGxowDxLJJDZ6HbD8L/0ErIdTzsNUe18m3BFwXXt2kW0eTMIyUgE11PqDl+9vSi7VFWUYq1dW9rjF4mKtEWzBY8Hc4dLmFlSwecTBJaZRgLTDZ8PNnMulTf/5jewB1/8YnrmCmuPut3Zn+GJCegMNjfDQbRC0skyGiDs3YuSmlNOwXyYnBTErdl7z1IPjzyCjKurrpq+jNb+fmROBgJEX/4yMgXKCFVbVGJkljcnEtiMZfMB9D5VNn1EzhqSZdFdVe/L2GyY/6yJlUvvOhOjoyD2778ffkF3NzaLF1yQ3S/jjCnO+JNl+AOSZKwEk8ub3W74RdnKm7lBDEtYcEM/Jg69XhxndBRB2gUL0jftTBo2NiJIytlMrEsXDoPIt9shhcDkn6LgnE6nMZ+Utci5mUEpsHUrul4vXw4iIN89lGVRbcM/IyPi7x0d8He6u/HT1SXmXjIJ/ysaRYXLggWluf4SYc7bIi5n5nmpz0okEmSjfu7yHqzYAJc+Q5Az5Ph6+O9chcYaimZRV4fnjDu0G9l3laJk2SzY9nB5s9VgwPg4AoNdXdjTMjhT2mbL3kgllSL63vdgS779bfEeWUYAyu3O3YBFj7/8BT8XXYTgpKYJqar16zGm4bBo6tLQUHgvum0b0W234TvddBOO4XJhH5vtfmQShoruKd
bjHmgePMciiE/ZxM4uHjj9FGZLWiAioeK6NR7F1VTWny+rcZbg6wRCJY3113Eb33HnDor38FvnFyjCsX+zLy+4NFGUmvWK34frhyy6y9WZKQ/JszB/r0tddQ3XLooahM5Io8qxXPSVERnoOeHujlri4x1TmRPVNcDKqSRYsQWNu4EQHFVatAsD9sGGyNOXOACens3mDu6pwc0U7NAzW4w2HjRuBJfj6SG1whzFWLublES5cieXjWWWhbS9Tu6nLhp7jYnOIkXjQNA51WrULg7+yzUwusdnSgVd1uR9WP2TqZM9ViwZ4PhZCwKC01tgH2Rfsy4zfzfTqdqSc6+4NHTieuz+2G3c42e0YyMpCiKEi2EAEPzZ71t94iWr4cSc//+q/E3QyffYZugqoqcCba7dDjPExILz4fnvn8fGDiY4+h0OF//gf7wSgpq6rAUEURAUQirL2jA/iuquBgZFqpeOnuxnlzc5Pj0PX5UCEZDCJZW1UVi7GSBP0UCEA3FRYK3tZUg3vMl8u8/0ceiXN3d6My3WrF7+nGCacT3xnrFO40GSTu2Uxsa48MwYbSjAx10WfrZVkEFHkqsc0m2mz3h8CNpsFpr60F19aUKTCAuWrH54OiYK6tzz5Dtd6sWchUsVgsAFx9ILGrC8pn2DBUINbXg4tp1izz9YRCUELZ2cjuMGhGInD8v/4ajj8HEJub8ZkpU+Cgs3KLRpH98nrFxFIjInRFwTG8XihRo9L3UAiAPnw4HGynExVCa9fivhxyiPGksXhxOuGoMSm9xwMFNHUqHJ1gEJk5Wcbx+mqF2hvJycE1dXbifufnD6qSyshBJsEgWn7tdrTHmDmj774Lg+2oo5Bl5qCb1YrPsnOZlYVM95IlCIhcc414drOzBYcWV1L7fMIQ/uc/YbSfdhoqovv7zCsKBo0sX47Awn//tzGWM4ZyoKIvQzMSQdBixQrgzI039sasUEjwOQ4fnnrgxWKBke31Apsfegj37PrrwQspSTDw49uXM7J/C7d8RqOCfN6svdlqRTvckUciePz222hdP+IIojPOEM8e8wzm5eFZ7umB7nK78WwUFvYdHM/PBx/WMccAC776Cnvwk0+wj51O2AVz5kCfDsSwHotFDI3xeGAzTJ4sOBY9HujDlha8f+1a7PEjjkArXqIAotsNPTpsWKzdkkg0DXv9449RmWnGo9qX1NfDHhk2DPeur0Q1D8/hqkxucy4sjG1v1rcvD9YQQVnGM6ZpeG7t9sG1SWw2BDSYu5apX4b6AMWM7L+yeTOwZ+5cc4z5/HNQmxxyCNGZZ8YOODE63h/+APz+4x9F5wMXuPA0ZSLYE9z2+/nnRE8/DRy+5hrsP5tNBPU5AMldYbKMdejxZtcuJGTy8kBX5XTis1ZrbDCvvR0FDMXF8LkYf818kOZmdHLZbPCh4qu8uQJRltG5xQUksiwqqPtrK3V0QAewnzZnDu5JYyPwPi8PRSQDRV1mtSJA6/HAfo1EcL+GIlXaUJU+YV+SpCpJku6XJGmzJEm+PT8fSpJ0ch+fG0tE0T2/LpYkSdvzs9rgvT+TJGmXJElhSZI2SpK00OA9hZIk/U2SpMY979slSdIdkiRlx71vtck5Lt5z/rFxr58sSdIGSZJCkiTVSpJ0syRJlxi9d4+USJL0tCRJPZIkeSVJekGSpDTN2Tv4hKs3Ro4EsJWWAnA9Hhiq9fUwWoNBYdANtrz7LgJzhxyCYBYPUuEpUcOGQfF0dCBLtXMnDG+jkns2tJjvwueD0nnpJVQWnXNO4gBiOAxjlttu+XgeD5zczZtRNXTGGbiXY8eiatDjQVCOW8m58tHphEFfWCj+ppdgEMa3z4fzjR7dO4DY1YXvqKAAAQNWivPnQ+mHw6jm+PBDOPd9CWf9uD3HZsP1fvllHV1xxbV00knT6bDD8mj06Dw65phjaPny5QmPV1tbS1l7IjJPPvkkSZJEkiR9yw2klwceeIDGjx9P2dnZNGvWLPrwww9oxAgoQ68Xir2jo4cyWJSRdIqmYbJwQwOIvc0muH/2GRzq6dNhsMUbinY79ks4DFqBBx5A9c4NN/Tet1y9EgzG8gByAPHMM3tPhE9GAgG0+y5fDhz65S/NA4jd3YKPta8gXFMTJr+uWIHj3nln7wAit9plZe1dAJGlthbtSn/5C4KWjz8OXsjdu+voyiuvpfnzp9PYsXk0YsTAY9EHH3zQ6z0uVwaLUpWsLPyoKvSVqpq/125H+/E994AS4PPP8Vw/8wx0q16ys6Erq6uxj5nvuKYGz7ui9L02pxO69KqrMAzpuuuw37/6Cnv65z/Hv598gv2Wbqmvhw4eNw4YU1ICW2LmTCQvDj8c92zlSiQiZ82CXbJ2LXDn66+R7Ovuhq3h9+MeOJ2w9ZINIL75JvBu/nwMgmKHPdF3pRdVRWfD7t2wc2bNSr7TJTtbBAyZdqenRwxWCAZFNetgBBC5ZToUElWk3L5cV1dH1157LU2fPp3y8vIoL29g8Wj27Fm0bt0HlJ8vqr7D4YyflpH0S309dP+ECebT3nfvRlVhRQXoWhLRD9TWIhHkcBD96U+xhQi5udjHfr8YpNLejj3+yScIIM6di6RoTg7eoygimC/LIujIAS19smfDBtDDjBiBhAi35dpsOE4kgn9bW/FTWAi/mCc1qyr0jd4X5iFRO3bg/XPn9rYLAwHYlqoKH46HxtntYgAT0+HISbDiRaPQge+9h/UuXIhEm6YhSOp2A2+rqgY+oMdUCyUlWEt7O7C5789lYltxkhJmJvP1ziOiRUT0KhHVElEhEV1ARG9LkvRfmqatNPlcB4Gs8Uki+pCIHt3zelvc+64korw9f48Q0fVE9LokSVWapnUTEe25mSuJaDYRLSGiDUR0LBH9Zs9rpyZxHb1EkqTjiehNIqojotsIpLo/JSJPgo+9TUS7iehXRDSZiH62Z90XprKGjAixWsV0YFUVzq3PhwCOxSKMOSPer4GQL75A1qi6GkHEqqrexuKwYVj7F1/AyD/kEAC52fqYb6ezEwD7yiv4/3nn9T3RsK0Nn9cHEJubif7xD9yvSy5BoJPFbseauQz/vfdg9M+cGavcystxzzs6cHwOKrICHTu2N+cEczkGAgLEifDeujpkoyorURW5ezeM+VWrEISdPr0375miwNFqasLajj4a3/XmzRgQs3Hj5/Tpp+/TWWedQdXVY6mnp4eeeeYZOuWUU+i9996jE044wfC+DR8+nJ588klavHgxHXPMMXTFFVcQEdGIuFHZDz/8MPl8PrriiivIbrfTfffdRz/4wQ+orq6OioqKyG4nevvtMF1zzQlE2PdDFotCoVRWmZGBkldfFVOTJ082/n6++Qa8hsw343DAmItGY9+naQgC/uUvqIS+7jrsLaPghSSJibPZ2eCA27EDyYyjj+7/c9LRgcx+czO4WBctElNh49fY3Q1MY/64ROf68EME8LKyMLhh9uzYa+eW6EBAVAVGIv1bu16iUQyiefxx3Jff/AYUEx4PnJAVKz6n1avfp9NPP4PGjgUWPf88sOjtt9+jhQsFFnH
AIxQiys8fTo8//iRdfvliOuqoY+iyy4BFZWUjKBQSa37oIWDRJZcAix54AFi0fTuwSNOIVq4M0wUXDH0s2peSbHszS24u2mkXLcJeXLUKz+Z3v4uAu15HWq1wUAsLe7c65+cLB7Iv4fbbWbPwLG3fLgazrFuH9U+bhiDj7Nn9GwRiJF1dgoPLzGlva0NF/hD+EQAAIABJREFU8OTJRD/7Gd7Ldg3zK3Z24r080KWgAO/3+4WzbiaqCkzcuBEdFYsWCXuK6RiM+KX1Eo3CdujpgU1ixsWaSBwOQVvjcMDG6egA1vDQlcFoX2YeT00T1EB6+fzzz+n999+nM84QeDSYttHnnxP9859hooyflpE0Sk8PAmTDhyOIaCRdXbA5nE7YHMOHm/PjtbWB/kRVie6+O3YIG5EIzvt8opiBCBj/5psIlF15pQhQWq2wqyRJJKTa2vDaiBEieaoo8CN374ZNduSRsfhns+F3WRbJl7Iy+E+McVlZopiBu8L8fmBcKAQf1Wgol8cjCk9GjeqNV0xpo6rCppJlvM8oENvYiAAid7fNnIn1M/UT0131McU97eJw4J599RUCyvfc0+dHMrGtWEkJM5MJIi7TNO1fcQv8G+Fi/4dwA3qJpml+SZKeJdzo3ZqmPWNy/BFENFXTNN+eY3+w59jnEdGDe95zGREdRkQ3aZr25z2vPShJUgsRXS9J0vc1TXsriWuJl3uIyEdER2ia1r7n/EuIaEeCz3ykadr1/IuEHfszSZKu0TQt0ReUkX6IxSL4VpgAnNuefT7BA8NtzwORBd61C4qjvBw8HKNGmSun7m44+UVFUBIMwkYSDgvC/zfewPVccEFvYl29RKNQBBZLLAfg1q2ohnA6UV5vRn5eUIB1BYOoCMjJwe96Y3TkSCiSpib8SJKoEo1XJrIMhRGJIOun5x6xWhG4rK9H9quyEgbA2LGo0tyxA05KVRWcH6cT92PHDtybigpUQHCVFE9Jnj37e/TDH55FU6YI5fzzn/+cZs+eTXfffbepoZybm0s//vGPafHixTRu3Di64IILDN/X1tZGW7dupbw9fWcLFy6k2bNn07PPPkfHHXc1vfwy0YoVS6izcz0R0c0ZLMpIOmTTJqInnoBhecYZxu9pawOlQmEhWmDih33oZedOVCmVl6NiKRElBFdhRSJo2W1sBBYdfnj/r+Obb5DZj0RQTWk2PVFVgZfRKK4n0frCYdybVatgsP78573blPh44TCM7GSmOieSTZvAkfTNNwhe3HwzMI6DsDYb0YIF36Ozzz6LCgpE0Omaa35O8+fPpr/85e6YIKJecnNz6dxzf0yXX76YqqvH0XnnmWPRxo0Ci447biHNnz+bXnzxOTriiKvpgQeIVq5cQm53Bov2VvrT3sxSVISW9u9+VwwtWrUK1SUnnBCr+/WtzpGIaHX2eESrc25ucklRiwVJwqlTic4/H07p+vVIYD75JALfEyYgoHjYYeZBQDMJBmH35OWZB91cLuwPdto5UEoEXOJODVnGe7dswR7PzsZ6+Tp4r/JEaLatFAVY9/XXuJfHHRd7b4xoYYyuY9MmOLpTp/YOFvRHnE6cy+cTCVaXC468zZbccJhUhSknFAXn5qqhePne975HZ511Vsxrg2EbPfjgc2S1Xk1r1xI1Ni4hyvhpGUmTRCKo3HM40J5rtM9DIQQQAwEkM4YPN9+PPT3oZPB6EUBkrvp4ycrCPqurw/7+6CNwzC9cSHTppbH7z2IR1YiSBP9UloFtrANCIVRst7ejuGTGDOPzWixIUPT0AFOHD8f+1/tedjuO7fMhmdTejvPMmmWcPOrsBFbl5MCvSuQnWyy4bh78oh8ayr7YF1/ApyssRHKHq/8aGqDP8vLg7+2LQZTd3eD+XrUq6Y9kYluxkhJm9hlE1DTt22YJSZIcRJRLGJ29mojO7evzScjTfJP3nG+jJEkeItKHVE4jIj8R/T3us3cTorunEVG/brQkSeWEL+9hvsl7zt8pSdI/CVFYI3kw7vc1RHQdEVUR0ab+rCEjyYmeALykBKDMAcVAAH93OERAMR0A1t5O9PzzAMvDD8e/ZgZ5RwdK1MvKwJ/k8eA1I0J/WRbVe8uX498LLkDmxkyYj4cIxjC39Hz8MZyXUaNQwWTmPMsysnVZWXCKd+1CsCAchqPAXI7cnr1rF+7tIYcYE/6GwwggahoCFUYZJ30gsbERCqywEAb9uHGixWj3bihGJsXVK0Nuk87KQguV359DO3eiOqGyMkROp580TaMFCxbQCy+8YH4Dk5QLL7zwWyOZiGjWrFmUlzeMXnxxF335JdavKG9Qbm4u+f3+IY1F+wPXaEbwfP/5z8gg//KXxkkKnw9OtdMJqoLCQvMpd7W1yMCWlWHCrMMhgiTxoqowMLltsLEROJJoIryZ/Oc/mGJbVASj3owonM9ps+EaElViNTRgqnNjIyojf/Sj3tiuKMAJTq7sTfY7ECC6/360cw8fjkDswj3NJ9xGmJ3NreY5306TttlCpKrAooULgUX6/cWGO7/G7UJWa+99yEmdiy66kEpLBRYdfjiw6Mknd9ETT+A+l5S8QW730Mei/UWSmd4cL+XlRFdfjRbnf/0LNsO77yIZcOSRvZ9Xu11MqeSpzi0t2A/ME52s/SJJSDyOH4+hJk1Nojrx+efxU1UlBrNUVCQOVCoKHFOLBRWDRtceCoFGwO9HhTMPSDFbn8+HJGRVFZ71UEhUKno8SCZypW5WFjBuzRpcy2mnwVE1EosF6413solwT7/+Gv8/9NC9r8wkAq4EAqKisrpaDD0IhYAX6XacObnD+J2o4jFHB3yhUIj8/oG1jSorZ5HDATyaOxfVuUuXvkGU8dMykgbRNNj5kQj4Vo2efVVFxVldHdFPfwo/yMwuCgRgX7W1Ed1xB4oXEglzfS5bBjw96SS0SZvx5vLU+XAYWMDc1D09wLNAALaEWWJG02DndHcDp0eMEFWB3C7N57Zacc0NDXgf8xDGH6+1FThbUJA8By0fnysso1FcU00NMFXT4KNNmQIMDgax7mgUazGj4RlI8fsx+Oztt7GOY49Fgq0vycS2eklKmNlnEFGSJDsR/S8RXbTnYHpJB0tdncFr3USk78UeS0Q1mqbFNDxpmtYiSVIPESUIwZjK2D3/7jT4m9FrLPHrZZa3DN/GIAhXIDqdMF7DYRFM5PYZh0O0PafCx+D3g/vCakVLn8MBJ99IWEk4nZiylZsLg6+rC86ynuuLS91dLqJ33sHvF18MpWHWmqMo+IyqwmHhkvnXX4fjfsghaIM24/mRZXFfSkrw+alTsd6GBgQMuW3J70eAc+TI2OpPvWPO77Fa8b5E/EIWC+5bYyOcBU2D85udDeN+2DBkjZqbcawjj8S9UhQ4Vn4/qhTKy3EsiyVCb7zxe3riiaeopSV2G0pp6G2v0qUmW1ow6Y2oiDo6XHTTTZj2NmtWLVVXV9OmTZsyWJSRvRJZRkVPOIzMuFEAkafCBwIITOTliaB/vDQ3w0DOyUEAsbQUx45GBWk3C7f/ejzAus5OtOlMmtR7ymAi0T
S0QD/zDAzLX//aPJmhKIITrqgoMXasWoUJiNxKbMQTG4kAS4l6cw/1Vz76CPeutZXo3HMRIMnLw/X5fLiPzEuE9tcI/fWvv6cnn3yKGhoGHoseeYQoEimixkYX3Xor0UUXEc2bl8GidAu3N3MgMVmy+bFjiW66CZ0BL76IYUbLl4NX9LDDeu9XiwV7QD/VmW2GvLy+K3TjRZIQuK+sRKKhvR0ViuvWoS341VehR7lCsbq695p27YLOnzbNeG+y015Tg66H8nJz7jHuaIhEsCa+FodDtJ0RYX/5/cChri5wQ9fUIGmoquBY5ErFYcNwbzi4GV+RSARbads2YOmMGeadI/0Rnr7MCepoFPeppATXwu3fpaXpO184jOtiXui+ICUSidDvf/97euqpp6iubuDwyOtF5e3y5UQWSxGVlrrogQeAv3fcUUuU8dMykgbZsQP7auZMc3viiSdQGXf++eiqKi42DjaGw7AhamowTXnevMTn5krx994j+vRTcOGaBRBZfD7gGFdWEwH/PvgAGHHyyeZFKJomOGjLywU22u2iKjAcBhYEg2hfDoeB00VFwjZhkWUxTHP48NSHT1qtuKZPPoENUlaGgpqiIuBtVxfwlumuBrt9ORxGheirr8KuPPRQ+ML6YaaJJBPb6iUpYWYyIZb7CL3dDxHRR0TkIiKFiC4hoh8n8fm+xIxmOlXNp5l8Nl15wnSvNyN7IdnZ+CkuhsHKFYo89p2r3HjSbl/CTrvfD+DPyjInh/V6wZWhaZioyMFCux0GJjvpigLFwtO23nkHx7v0UgTwzFpzOOgoy3if3Q7F8MwzqBhYsAAVEGbKLRoF0EsS1sPXIEkA/awsHL+tDZktSULQr7JSKLbGRrzmcOB6urpwv/Ut1YnEYgGRb2MjFJGq4j7t2AHgP+wwKOmaGjhgW7dCSVVU4Bz61oTrr7+eHn74Ybryyqto8uSjSZaLKTfXSqtXL6UXXni278X0IVar9dsA7yef4B6glUKjU1NipshgUUbM5ZFH4PDecotxkoIDdM3NqMopLoaRaoRFnZ1Et9+Oz9x6q8gI8xRmdoLZ2fZ6US391FPAqKuvRnIhHBZtLH3hpSxjmNPKlcC/a681/4yiAI9VNXEAMRQievRR8EMecggIzI2M4GAQxr7VunfT+FwutGAvW4YK6aeeEgFLWcZ9UhRRCc/CWHTVVVfRvHlHk9NZTFlZVnrppaX0/PPpwaLubnDdvvQSvtecHKKFCzW65pqUDpnBoiSFCef7097MMnUqSPvXr8f3dv/9qBQ8+2wE2Y2EaVsiEWEzeL3QPTzVub9xoLIyVM+cdBL2yYYNcLiXL0fFRlERAopz5iBx0NoK3V5VZV6599RTGOzE1CsFBcZBM1XF8QIB6PBEU6m51TsrC3uQCNPPx48X98Hthu2kfz876/n5IshWWws7prAQ2JEOQn/99GXuduGBeJKE3+124G97O9ZVWJgaXzdPeI1GRXdNstegx6Ojjz6aiouLyWq10tKlS+nZZ/cej1TVSi+/LOh3jjoKnTCTJmlUnFpYLoNHGTGU1lZ0KI0ZI6gR4oVx7LvfBYbl5BjjDCdqN22CjXP88YnP7fNhL7/4IoJ155wDSoVw2Dypw1PKGZMUBQmZ//wHvx93HJnuEU0DZnk88HniK/k4qSXLwLeaGmDO7NlioGcwKOYFhMMIXioKjpcIexOJqsIX27QJGHTMMdANPL26tRW22rBh+I4Gs31ZluF3v/YafMrx40GrYVSR2YdkYluxktJ6k1FR5xHRU5qmxZiukiRdlsRn0zVPt4aIjpYkyaGP2O4p2yzc83eWbootF2UZF/c7R12N4taT9mKtGdlHwoTThYViImAggGBVdzcAhgOKRlUrmoasRkMDuI2ysmCMxw8AIRIZmkAAFXTx4J+VBSPb5xOTmzs6MFk0Jwdtg/wZI44fTUNwLxrFGhwO+tapbG9H+1Ii3rJIRLT5GVULSBLAPxhEQFKSBCdiIADlM3o0FFx9Pe5bOIx/+1Maz+eqrEQgceNGHJ+5I5nDsbwcXG6fforrZvL1ggJxrueee44uuugievBBVH4zj+LLLy9JYg19L3jDBijOUAgK6fjjUY2of1aqq6vpo48+ogwWZWRvZOVKtBCfdRYMNCN5/33wiS1ahH2SnW08wbinB5WHgQCq6eJ5UR0O/C0UgtMfCMDQfOIJ4MTPfy5IyznoGA4LPhwj8fmI7roLLS4/+hGq98y2mCwDuzTNvFqACFhz770Imp57LgIvRi2VTCxut+N4qfDhahoc4nvuAdZcfTXRZZeJ4GYohNd58l/8mhmL/v53YFE0ylWd6cGif/8bQZtAgOjEE5FwWrQoNmiTwaKBFX17M1d7JOMsSRL0x6xZCLS89hr2yowZ2O9mXFx2OypHSkrwLLndcNasVtHqnEpgrLAQrXQLF+KZ3rgRFYpr1gBj7HbYCIcfbl6ls2IFuh9OOgnvycoy5h7TNNg5Xi+uI5lW4mAQfI4tLXDaDzlErJslHBYt0B4P7ktTk/h7Rwe+p3HjEKzd2wAiT0LmVkK9DVBQgL/zsD+HA/ZLdzfWFg7jfvZnDVztqGli0mt/bKx4PGJZsmTv8IipF559FonsWbPwDE+YgKnheqmurqbt27ePzeBRRlIVvx+DMZj6yEjWr0el99y5KPSwWIwpFVQV9sTateCv/f73E++pUAi48vTT4EM+/3wUafj9TFvSe09zFXVODnwaWUaiZcsWBEGPO07sbYcjVn+oKgKDPh98MTNaCFkW/IclJfCbOKDJE6K5a6ynR3SApdqZ0dWFe9bTg+PMnSvOF4nATguH4QfqB3wOtGga7u3rr2MNZWXo5DvyyJR5aTOxrTRIMmpOobhIpCRJk4no9L4+qGmaIklSiIhSLKj9Vt4kopOI6Coi+j/d6zfv+fcN3Ws7iegUSZJGaprWsme9BYTosn5tLZIkbSCi8yRJulVHPllK6YlCZ2Qfip5jSFFEQNHtBjjabKLlmTPZK1ci83L88aJyceTI3scOhTCdqrsbzoIR/5ck4Rx5eTBwN29GdU1FBRzC+MyUnuPHYoHCYH4Nbj1euhSG8uWXJy7Z5sEtViuUjpnj09mJ4xUXQ0lwRU9bGxRbaSmubd06BACnT0+doJzboPnejxkjgh2yLKoUFy2CstiyBQpj+3Y4Fah8tJKmCewuLibKzd1OH374GhFB8Y8bZ6zUrFYrORwO6u7ujnk9FMJ5iBBAPOssVHialeefeuqp9M477xBlsCgjKcquXUR//StadS65xPg969Yh2zp3rpjYbmQo+XxEv/sd9vtvf2vMrcpVLcEg8K+xEVhChKqf+KAGv5eDjvH40dKCc7a3o1LwuOPMr5WHKxCZVwxi0jDal3NzERA1Ih/XNOBHMCgGUqRS8dPYiKrNTz+FU3zbbWKolVn7crzEY1FWFlF7+3Z65x1gkdud+LNGWBSJCFLwNWuITj8dumLKFGOHIINFAy/69uZoVFQlJvvZY48lmj8f3+ubb6JK+PDDiX74Q3NdarHg2S4sFI6hywV7I5VWZ
73k5qKK7Kij8IyvX4917dyJoNzy5WgLO+ww4JPDgeTao4/C1vn+92GnGHGPaRocULcbaywp6Xt/+v1IZnR2Ev34xwLr4oU7TjjxqmmCwmb9evxbVATM+vRTrDu+DTrZahluHyQyrwYsKMD34nYLzkKmVHC5gJHJtDdrGs4lyyIgmUpVTzweERFt376dXnvttaQ+G49HqooAOMck8/Mx1Xb6dPNnbw8e5VEGjzKSgsgy9rLViko7I91ZVwcO6bFj0WKsKAgmxb9X08BpvHo1bPozz0wc1I9G4WP94x84x2WXoQKRCH4AF6UMGyYwjbGZK8ZlGXq7pgY6+4gjBF8icyo7ncLXq63FMUePNm859njgC4XDqBivrBQ8iayLcnOhG5qasI6xY1NLosgy0ZdfwufKyYHu0vu2XG1tt8POtNnwGR4imo7KbyPRNCSrX3sNFaq5uaDsOPzwvR7ikoltpUGS+dpfI6JLJEnyEybLjCNc8FbCCOq+5HMiWiRJ0k1E1EhE7ZqmJT8/B7KEMMXmz5IkTSGijUR0DCGS/Fbc9JrHiOhGInpfkqRHiCiHiH6y59zxIaH/IaJ3iOgTSZJ4TPdPCNHfIkpftDkj+1CsVmFMcqWd3y8y21YrAlDvv0/0ne8AmIJBgHG8ERyJoC2ovR2OblWVuaFssQD8t21DQKC4GI6hUWUjr5OHqIRCMEJzc5GZe+45rP/KKwVnhpGEQlAoVis+b6SIZRkKx++HMTxlCgKdnZ34vbgYx6ipwfUWF+N++HyxbdHJCGfbGhuhaI49VlQmtbbimlpaoBTLy0XlwsiR+MzmzTBmS0uJTjrpdHrmmaWUm5tLs2fPpt27d9NDDz1E06ZNpQ0bNnxLJGzWNjZv3jx6//336d5776WRIyupo6OMXK7jafNm/P2oo1ABxZwfRnLZZZfRkiVLaP369Rksyki/xedDew07ZUYG0O7dcOwnTAAnK2NB/DMZChH94Q/YJ7/+tflzTySCIdu2IdPudCIAaNQuxLyz3Cqjnxy7ZQvOKUkIJJpVCxDB2GW/1CyAGAwSPfww8HHmTAQ1jYKlPJAlEhFtjP0VRcG1P/AA1nLLLah84vuaqH05Xk4//XRaurQ3Fk2dCiziwIAZn5MeiyoqKqm2tow+/fR42r4dfz/vPKL//d/YFvR4yWDR4Ai3N7PDxNViyQaw7XZU8B13HFp2V6yADXHssWJQkpnwcxiNCq4urxfBqsJC7INUKe/sdnz+9NOxj2tqkLzYsAGVKDYb8AFtq3CsucrOKJDa3Y015ucnVyHs8SCZ4XaLFulkha+5pQU2ydFHw3bw+4GxbNt1dIj35+bGBhZzcnrfO+aQ5YCe2TVIEu4/D1cpLBTUOdnZOG9f7c0crNQ00UWTqvSFR30J49E999xLslxJGzeWEdHx32L2yScj4ZLIYb/sssvommuuWU8ZPy0jKcimTdi/8+YZB6q7u2F75OSAs1hRsLfiE2yahmDgO+/guT333MRJF1VF4PDhh4EnV10V2x3C2OH1wibKzRV+FicPgkH4jy4X/IiJE7EO/jwPlQqFgJ21tfi/GX0ED1rZvRvnOOwwYfNwRTzz9rpcuG+FhViLovQ/oNfcjMIYvx9rnzVLYLyiYC0+H/CsokLggKaJBJssJ88hnKzs2oXuwO3bcR8WLsS9GDs29VZtnWRiW2mQZL7u64koSEQ/JEQ8txHRT4loKiV3o68iTH25nXDRa4ioXzda07SwJEknENHvdOtoJKLf7/nRv/cbSZLOIaI7ieheIqonor8QJuAsjXvv+5Ik/WDPe+8gomYiup+IooTpNjFklxkZ+mKxAHyYND8QQAXaG28AHCsrkZGaOLG3oRyNwshub0fGnHkFzSQcxvvffhvZposvFpU5BQXmitLvh2Gcmwti3mXLcK6LLzZuZ2RhxWaz4fNGBrDfH8uZwQ7MiBFQdmx0l5Yi4BAMwpEYPx5Bv/p6KL5ksj/d3Wg3DoUQIBw/PpaXcdcurGP0aNx3vTEgSXh91CjBl3jSSfdROOyk1157hZYuXUpTpkyhRx55hLZu3UobNmyg6dNxvo0b8V3FfzcPPfQQXX311fSb39xKoVCAKiuPo+uvP56OPx5k4RUV5i0FLNnZ2bRy5UoqKir6O2WwKCP9EE0D/15HB9psjLLPHR2YqlpaCh7EQADGY7yDGY3iWDt2YKDDoYcmPjdPXl26FEa4WQCRRR9I5OFKa9aA4628HETl5eXmn49EsP8tFhi2RnhRU4P70NqKKqQzzzR3tl0uMZAlleEFW7eiCmzrVhiit9wSWwnWV/tyvNx3333kdDrplVeMsaioCEGM7u7YwQ8s8ViUl3ccff/7x9Pll4P8PRkjOYNFgys2W2rtzSxOJ57xRYtgb6xejQDdiSeibS5R0DorS7Q6e70I1rW1icRfKq3OtbU41qRJwJiZM/GzeDEqE//9b+x3HiLy17/CwTfiFeNWXqcTe7Sv/dPTA0c/EMD5zFq8zaS7GwlGiwUO77BhwNdhw3AveL9FIiKgyDywLS34m9Uqgooc/LPZercvm4k+kNjTI67bZottb+YBB/z9MF2EomAN2dl73xLYFx71JQ899BBdfPHVdMstt1I0GqCKiuNo6dLjKRzG1HGjivR4ycZNy/hpGem31NTADpgyxdgGD4eJ/vhH7GHmfs7ONg7AvfACAk/HHAOqlUSUCpqGc//tb6iivu46Y5oomw17IBjE/mU6lZIS7PP33oNeOOEE+C2aJpJONpsIJHq9wFYiVPMZJRmjUfheXV3AjcmTe2O7xQKsaWgAxhQVAXMCAcHXmkxSIhSCj1pXh7WceGIsNVcggACiLKOwI76Djs+jqrHBxP7qxnhpakLl4Vdf4VhHHIGincpKXGcaZkURZWJbaREpvgQ+I0SSJP2NiC4nonxN08zIJlORzM3ez6SzE606ubkoe9+5E6BVUSEcaTYwN2yA4V5ZCScv0dQrLm1ftgzK4sILxXS/nh5RUaMPCrpcUAgFBXj9lVcA8LNno2ImkZMQDEKZmfGEMVdRZyeuJT5op78fNTVQ2mPGwJF1u4XSZsL3MWPMDd9oFBm01lbcv0mTYqsteHoYT2+srobiTaQYZBnVojt24PhVVZhOFu94RSJQwB4PggTjx2OdmganY9kyfIdlZVBMnAnUcy8mKYNC0J3BogNH/vlPVML97GdoC4wXvx9YFI2i6icSARbF860qClp61q7FMJMFCxKfV9PQJvTYY9iHV12FYxpV4sSLLMOQfOUVGHUzZhD94heJA1wcQLRagZHxxqSmoRpr6VJg4A03YC8bSTgcW83Y32qdUAjDX554Ap//1a8wZZ2vW9++bLfHTn/dW9E0wZFmt8NI52Nv2ICKyK++QjD34otRTeV0Jve9xEkGiwZRuPpCVfFs95PM/Vtpb8e++vRT6PtTTkGAMdlnnNvp/H78zq3OyQTZOzqgTysqjAN4kQgqYevq0P2wcyeeVXZQx41De/PcubAL3G4xcTpRopMINgZTsyxenDiZYSTNzVhPTg7wSJ+MVVV8P/FD6liY
P0zPr+h2i/bl3FwxwIornvsKznKVtKr2rrgOBBAMIELAISsL52LnO9VnJ51SX4/E1fr1uO5TTkE1lcORuBLaRAZtcMkA4dGQwqIDQVwuUAqNGAFfJ140DdzFn30G22P0aGDHyJG99+abbwrqhcsvB74len5ranBst5voxhtR5ZZIurqA20VF8CGampAMys4GduuDbKoKW81iEZWD33wDDK2qQoAwHqPcbnR7RKPwXcywkbvJmPIqNxfnkyTgm6JgLyfCl927sedlGTQF06fH3ivm8c/Kwj1PhkKDg4mcOLXZ+hdM7OpCgo0r4WfNwrqKi3HP+knjkRmiFCcDgZkHdRBRkiQrEVk0TYvqXhtBiEiv1TTtpDSf8uC92fuhBAKYkBoOQ+HwBMDJkwGEzKMYiSAA5fEg+DZxYmLDV1XBgfT22zjWxRdDyXCbjNUKZcEcGcOGCa7GYcMAlE8JC1oTAAAgAElEQVQ8gUq9E0/ETyKnkp0JLq2Pf68sI5sUCMDJKC83V6zcxux2A7THjoUS7+yE8uMS+sJC3Iv447S3Q1HKMhRPVVXse/x+VAJoGoyGaBRBvfx8HK8v55mDhLt24XcmUtcHRHmydEODGKKzahUqL4qLQcRbXY3XS0pSbiNKq4LKYNGBLZ9/Ds7CE06AsWq0R5cuxd649FJgAA9V0htheq6fSy+Fw9eXfPYZ0eOP41g33oiAeTCINSSqfiLCfrvvPiREFi1C0DKRUx0Ox05NjseHQAA8W598AofhuuvMW34Z17iyur+Z7bVrwXfY2IgKsBtuiD2Xvn05Nze1CsdkhKkgJAn4+OijgqLhoovwTGRn47tIMaiQwaJ9IFxpwgGhVKsj6utR7cUDBc44AwHlZJ/3aBT62u2G7cEVOnr+Lr34/eCYystD8N6I2/Dee7FHf/EL6Fi/H057VxcSm+vXQ59Go9ibM2Yg8DR5cmKnva0Nto2mwS5KVM0cL5oGx7ehAdgybZoxFvUVSNRLOIyfQADX4vPBzgsGxXv0bdD5+cbTshUldniU/rtjihqvF3t8+HDge5qqaVKW9nZMEP/oI6zn5JPRYu904md/sIuIBh2PhiQWDVUJhaALs7Jglxvt56eewjCNSy8Fxrjd2EPxtsv776Oi8JBDiH7yE/gTifRpbS3RnXdi7//qV2Kgk5lEo9gzfj/O39IC26q4GLaRkS3F3IXMgSjLopOLqxN5kGZDA3wvhwOBM7NEbSiERIqqApM5aaMoWCN32EkSMCv+nvp8WHdrqxiopa/W5AClz4fX+wrEGol+LVw1megYXi8KPNasEYPJpkzBPR01Cvc7BTlog4iDiZkHexCxnIjWEtEzRFRLRKMJfePFRLRA07RP0nzKg/dm72ciyzBmm5qgnFQVRuDEibGZdE1DAKCuTrTqlJeLCsV4x0/TUNr+9tswci+5RBhjiiJaWKxWgLTPB6WgqsLB/cc/kJ076yxBMGxmcPr9UKoOB9YW/z6fD9eoaVA4ZqX9XKno88FQtlqhJHNyRCCQp0x3d8PIrqhARaIkwRDfuRNORn4+qg/1SpADkS4XnJyKCnFfXC4otLw848CkkQSDyNjV1WGtkybhu9MrzC1bUPlVW4uA5kknicErhYXmwYskJd2OewaLDlBpaUHwrayM6P/+z5jD51//AifQuediT3m9vVt3NQ1TCZcvR5vO2Wf3fe5PP0UAsaICbc/8zMuy4Ocxa91zu8FBtH072o1POgk4Y+ZchkII+mVlGfOK7tqF4ERHByYfnn66Oa55PMCi7OzEHKVm677nHjgeVVVoY46fOhsMAjstFmNDO91SVwcHZ9UqnG/xYrSw2u34jvcyqJDBon0kXHnBPIl708K1fTuCOt98AxvjzDNR6Zfsc8GVr1xZZ7GIKnu2UWQZOKOqaF02crKffhoVkhdfjAA3Dy2Jtx3q6hAA2LQJDrDNBoybMwc/48fHrr25GTaXzQa7qD+OoaKAiqCzE07lhAmJ74uyp8bC7PtQVWF3GWFgNBrbBu3x4DUi3FeuUmR+RYdDcD1LUmwFdiSC74O/Fw4kDjTmmInbjXbP997DtZx4ohgmmJ0NPNpfsIho0PFoyGLRUBNVRaLP60UA0Sho9t574Co8+WTYDO3teF98y/PHHxPdfTf8gCuugL2TqCK6tpbojjuwH3/7W/OBTiyyDLuFA3MffohjTJyIwHuivez3w/axWOB/5OSIic0WC362b4cfVFaGazA7ns8He9JmwzUa2ZKyLPDLaoWvY7Phb9u2IVnFNBDxOOr3I+mqKPAXE3XbJSN6HmGrVVCCsIRC+I65HXzuXARQs7Kgb8aM2Suu2IM5iDhomHmwBxFziehRApFlGRFFiOgzIrpN07SPBuCUB+/N3o9E04hefhmTqM45B0ZpfT0Md31mnFtgm5uRbSothfEXjSKgxi0wdruY9LxqFTIqM2bACI830mUZypPBtKMDyigvD+99+mm87+KLUS2XKKvOWXOjAGKy7ctEUBg8zKW4WLQeu90IQDocInvGrTsNDfh39Ghcd00NzlldjfupXwu3LweDUAzMv6iXnh68Jycncat0vHi9+I6amnB9U6bgHCtWoGWQJ4mVlODapkxJmwGfbsc9g0UHoEQi4B9sawO/mNG095UrkYH9r/9CVrizE/sgfuDCc88h2Hjaaahg68vR+89/EEAcM4bo5pt7G9WRCH6ys3vjVEMDBqf09GD9RxyB/SvLcDLj9w9Pfs7K6l0NrWlIqjz5JHDqhhvMh8BoGhzxUAjrNaumMvvsO+8Q3XUX1nLJJWjFjK9S9npx3TxYYiArglwufAevvgpM+8EPiL77XVxXUZFI2OylZLBoH0q62pv5WBs3Yp83NaET4Oyz4Vj1R4JB7F2fD7/n5gJP6uvx+vTpxsOJVq5EpfN3v4vkaksL9k98xWAgIDhPefDL5s2oUtyyRQw9mD0bjqHTCToHpxPH7Y9zGg6jctLng9ObTPuzpon2vnhbQj99mXkQk5FQSLRAe734UVX8LStLBBM5IVxaGjvR227HMTo78Zni4r5bv9MpwSDaPd9+G+tasADBGaZwyM3d/+wiokHHoyGNRUNJNm8GHs2ebVyR/OWXGEI3axZ4gtva8JyOHBmrs9evh61SVQWqlrKyxPhSW4sOBVkGv2JfA50UBb6UpgHTPvwQFdGTJqFiPFGQKxRCAFGWcR59oFRRUDyxYwf0xoQJCAyaSXc31uFwAAMT2Q2KAhuHOzmIgM0uF3zBuXNjKye5yKO9XfiL/WwdTijxwURJAufusmXA9dmziebPF75xoonV/ZCDOYg4aJh5UAcR94FkbvZ+IB98gGDfokUAru3bYdzGZ2W2bYMzXVoK5TF8eKwSYL4wvx/KYvVqAOOsWSinN2sV5MqFSATAbbUiy/7mm1CQP/1pbKbNKKvORixXR8Yfv6kJa+PKSTNHORqFIpPl3tfH52logGKpqhIKKRSCkl+3DsrmsMMQGIhXPD4fjs/ty4mq/zweZMGcTgQ++uNcM6/KypVQ8GVlaPWcMwcOA09u5kBiGoz3oa6gMlg
0wKJpRH/5C9ps7rijdzUcEQIGr7yC/XPqqcADSerNl/P662jrWbQIgbG+Al8ffYSqxXHjUIFo1qrL/Dl67quNG5HVz87G1OeJE8X1BAL4V//+QAB7127vnczw+dC+vHYtrv/aa83bdFQVlczRKHCiP9P3WlrgcPz732hLuu223tUFsox1qurAti8T4bqfeYbo2WeBP2ecgUqKvDxxz5nKYm+msu6RDBbtB5Ku9mYiPKOffAJs6OpCV8PZZyMh1t81catzUxOc0BkzBFewXjZtglM9YwYGD7W34/PxDmswCH1rswEjuHpN/3e2Db76CutvaMDar74aNleyz7zPh3XJMu5BX4PP9GIUSOTpy1br3rcUM5+qvmLR7xdT6e12OMPDh8N+5GCdLAPnw2EEco06SNIp0SgqfV59FWs84ghUgRcU4BocjrS2V2ewKCN9SlMTsKG62jih2NhI9Mtfwo6/8048tzygUY8dW7agknDkSGBLUZFxkQJLbS3eTwSbbOzYxOtUVWAm6+s1a4Cl8+fjnDzMyeh8wSB8EYsF12m1CjorTUMl965dwNHp03tzX7NoGrDY7QZe9GewiM+HZPKuXcDO+fPhW+mF6a54wvPIkenjhY6/jkgEeu2tt3A906YheU4EPCwtRQAzDYlVoqGPRUNC/p+9846P66zy/jMajTTSqFqW3GS5l7jEJbaJTQpxCikkkLKUBNgQSCgBll1KgLCUpS4svNRleWlLCAmQAE5PSOwkTnfsJI6juMuWZNnq0vR6733/+HLe585ommRJtpw5n48+ljUztzxzn1N/53cKScTxlcJin2B59VVahlatwpE6cADltWhRsnE6cAB0XUODblnJ5MBaFgnABx+kOiVtek6nbnm2O2nSdtTTw+s7d+LkTZtGC5O0S9uPb3eGvV4Ufjq0kt8Pos+yqGplS9qFw1T3HA4Mb6bKUyBAxbCkBKNbVMT/W1sJ3sVILlqkzydIyIGBoe3L2cTvx6BJ0jIfYxKJkBR+6inNlzJrFuszbx7XVlvLfezdi9GcO3d4fExpZKIbqIIuGmN54AHQh+99Lz+pcvgw6LxZsxi8JO1ukycno5mkpWfDBlCBuRy8J57guPPnw4GYraIsiUGl2C9yrpkzGayQ2nJoWXqQg8eDDvH52K81NcnO7b59DIDp7wc5+ba3ZS9myHCC2tr8q+CmyVCAH/2I3z/5SZJ1qWsk7csykXWsWgljMaX+/Gf4LX0+HOSbbqJ4IVMLhQTd69V8jMdZ1CjoopNERrO9WSmemS1b8C/8fhLx4iMMR/r7QeyUlOggsaqKPetyEdTfcgs+zre/rVF3DQ3JxdBIhGNJEk58nEz7+rXXQDb6fCTR4nGu4fTTKfCtWJE5md/XR5KguJjE5nCKCiLSyaEUutU0Of8oJO7TiiAN+/rwfxIJdKPDwU9FheZWFN6w0tKhOn80xDTxie66i2tavhy6jKlTOW9xsR72N4pS0EUFySo+H4mkmhql1q1L35X0hS9gS//zP9kf/f34BfZ45uBBih01Ndj9ykrijEzP88GDUJs4nSQQc02EN032jSDonnqKPbtxIzrUMLgXl2uobgoG8e+cTmINmWAslFZ79xIbTZnC6zLFObVbzDSJ50Ih3Q2Xr3R2AqwYGOBzy5dzPnuBS9qXhV8xNZ4cLbEsYu+//Y37EXqpykr8II+H7yMdOv44ZKLrogkhhSTi+EphsU+gtLYS3M2cSbtwVxcVntmzk5Xn4cPw+02ZovmEUiH0IpZFVeWxx0AgSpthKKR/pB1ZWp4dDhS8YYAYam4mOLjySt1qV1ExlFPQNDXpt8eTzFEk1aq+Ppz7xsbsjrLfj4F0uXAqcwXVwSCJw3AYZzwW09OPu7qouJWVgVqoqcGBjkT4vaFheFXuQADkQklJMvoxVeJxuFA2b2adV62C38fp5PsTHpJp00AnSXvR3r04KvX1JFpG6ERPdANV0EVjKHv2gABctQqHNfX57+vTU+FvvFEnlVIReE8/zWCTVasI8nPt00cfpW3wtNOU+tSnMlMY2MU02XN33gnf4urVXHsmJLVpst/s7c326eZSVPn970lKfPrTGs2YTqJRgoSiIhzlfIPpAwcICl59FcL1L395aDvQeLUvGwZJ41/8Aj28fj2tVbNn6yE2Hk+yTpZri0SGTm8ephR00Ukko9neLBKJ0Kr/8MM8y2efTWu8fRpots/u2oUuWLZMt7kFAtqv+O53eYa/+132iAw7sxdOo1H0ltOpE3/Zppk3N5PAmjIFDtDSUvSiDGbxejnW0qUkFFeu1EmCI0fY35WVBL/Hk/SLxVgDp3N47cvDEctifRIJ1sPt5rxer/b/hH5GhjkppZMKQuOSirQa6bVs367Un/7EOs6bp9R73oMOloExZWX52YYRSEEXFSSjiM9uWdjs1Gc9FsOOt7bSWdDUBFDB7SaOEBGkYmkp/oXHw97J9Ezv3YsfVlpKl0IqGi9VpL1XuAVfeIE9esEFybGiDGUSPlGl2OeHD6P3585N1v+9vRRGLAvQhRSDZGClPZEoHWXxeO4uLrtEo1A5tbSgP9etQ49LkVY4Evv7idNKS4mJx0gfqP37QdS3tHAf73gHcV17O9daV6dBJqOsmye6LpoQUkgijq8UFvsESX8/AV5ZGe3CiYSGeM+cqd935AitxQ0NOOiC6Eun3CyLNsMnniDwvvbaoe+zLI2CCYV0pbqoiNa7zk4QOhs36uDW6+UzbndycN7fz3EqK5MNWTzOdYfDXHM2OL8cZ3CQtZgyJb/ANZHA+L3yCp875xw+K3LsGAYvGuUeq6tBZY50eEkoRNKyuBiDYzfEpsmwm0ceYa0WL2ZIQW0t92YYGj3a1sZ1SyvEsmVcW3s7jorbzedHgHKY6AaqoIvGSAYHlbr5Zp7Zn/xkaHU1FCKBGImgiyorceZKSpKD9h07qMQvWgQqMJeT99BDoOCWLlXq4x/PH80XiTDw5PnnoQC46abcifWBARIKFRXJqCi/nwEiO3bQOnPzzdlRdjIYSrgU80noR6NK/fKXcA1WVZFcvfTSoTpPAgDT5DpHk+NHxLLQ///93wQOsvYrV3JvhqEnL2fSyZEI1ymk7SNIIhR00Ukoo9neLOLzUbTcsoXjXXAB/kOmPWaaoAGjUZJx9j0gbbVf+xrP7mc/CzIwENDE/XLN8Tg6yulkLyUS2Tn0du4kcGxsBGWduvcsC/9rxw5+ZGjBggWa0H/ePIohx4OUk/ZlpfBbRhl1p5TSCQDLGsovm4rUVkqjv4VfcXAQ3ykW4xobGvBRZHBLRUX+1/366xSD9u/n+3vXu0jQSsHH5UqmohgDKeiigqQVy2Kv9/biG6Si3oT+5bnn0EXr1hFXCEpO9kB3NzbfMEAslpXhN2VCsTU30xLt8eBH5UIgWha+TTTKvmxuZk+ef356HyIQ0BQswSBxRWkpCUTRj5aFjpXXTjtt6P3HYhzH5eLejh7l79On50+90tpKASEWo1V42TK9bvE4vpYUN6JRgBRj1b7c3q7Upk3Yn5oa7NTatcSqXi96aPZs1k
OKbg7H6CD4/yETXRdNCCkkEcdXCot9AiQcJmgPBgnaq6upTMlUX1GgnZ1U7CdPRnGHQijYdIbDNBnO8uyzusUoV5Afi6HkjxwBtRIKEbQvX46S9Xi08gwGCSxdLl7zevWwAan+OxzDa18WtGIwqNuz8wlsentxSmMxjI5lYRzFAMh6yJRGj4d183gwviOtcIXDJAGLinQicdcukiXd3fztssu4jv5+1lMSMfZA3DBANezdi7FqasLAGgbIiHgcg59u6EUWmegGqqCLxkAMAx7B3buZxJxK2p1I0Grc0QEaeuZM9pdhsFdEFzU3Qxbe1ESQn82JFOTfpk20CX7kI/knzPr6mMDc0gJa6IIL+Gy2irDPx16TqnlJiUYZff/76Krrr4e0P5t+EVqGTJPl08lLL4E+PHwYDsnPfjY9Afd4tC9v306rZnMzOujmmymuCFpbhhXkg0QTvkZJzgyzvbmgi05SGe32ZpHeXvb7M8+wfy69FBR+qq09cIAE3eLFQ/eJZaGjtm7l2V28GN8kHkdv1dezt+2TSaur9TCmTDpm+3al7r0XGpHrrsudFLcszrttG4VB8SGWLWMAwBlnDNs2J01fLilh7TMNWhmpmCb73DA0yjHdsUMhzV+dyT8zDNbg6FGu2+Vi3ZXSKGZpg66qGlqUOHyY5OHOnRRj/umfQKvG4xzP4eAzY9XGbZOCLipIWjlwgDhi6dL0SMA77iCmev/7QVn397NvGhq0/zMwAALR58MPcLuJhzK1+Qq/c1UVCcdcCUSlNFhj71500dy5DFDJpLuFnsrn4/rKyviMvD8axR8cHESPzZvHZxyOoX5JLMb5+/t1Z1c+ezYYBFhx9Cjxz7p16f2i/n7uy+FgLWpruYbR9I96etD/27ahcy65hCFOUixRilg1tUNNbOUoJhMnui6aEFJIIo6vFBZ7nMUwCNrb2pjWOWsWAbPfTwJRjFNPDw5YTQ1KfnAQZ8zeMmw/5t13oyTXrsXg5aoUJRJU1VpacJTdblpM6uo4l1SJ3W7No5hI8JpMYK6r03xaQvgrRiuXsZFJYAIfT3dfqRKLYfR7ezHUCxfixEajOK2WRfDsdOrpy4KgrK3V7T01Nfx/JM57JELitbWVtsVjx0BAXnopzkgwyBpYFveUbZprLAZP24EDepL0/PkcW3hD5s/P26BOdANV0EVjIL/6FbrhM58hIWcXywKds3MnQd7y5TqRNmmSDsqlTXfyZBKJuQoDd9+NTlm1iuRdvgmoQ4doGQoGud41azT9QllZ+v0qCcTycq4rGuXn4YdBQTY00F6UbeKhfQKzcITlkkAAlMJdd+GAfuUrcESmirRmS6KjomL025f37GFYzPPPc78f/jDFDMNgbUyT77KsbHjntrc3u1zoszx1ZkEXncQyFu3NIkeOEHi//DLPy9vfrtS553IeoRmZORNEYKrceSd79n3vU+qqq9jbx45h/yxLI+sSCfZ7bS3PZnFxZh3z3HOaG/rd787/XqXlWobBHTkCaqmlhdenTyeZuHo1Ply2fRWPo5McDs3bqJRu3ZYC7PGITLZ3OPJrkQ4G0Uvl5dl5v6RTRdDTppk8uEUSi1IciUahc3n5ZfTolVeSTC4qYi0NA78wGxJ6lKWgiwoyRHp6KC7MmEGhM1Uef5yC3IUXYk/DYT5TVaWTYX4/BdquLuy/FAczUU1t2wYVTG0t/k0+Q6kGBvh55RV8s5Ur8atySXc3sUV1NQAF0Tn9/SQQTROdKN1bwo8og1ZE+vrQwS4Xei5XMdiyiNFeeYX/n346nSup6yEAEhncJ4OeZJp8URHnPB4d4fMBjtm6lXu64AJ0kcNBjCXglVmzcseqUniT63qD0rxMCCkkEcdXCos9jmJZVOtfekmpa66hVae3Fwd1xgw9NGBggPdUVJCYkoEndg4OkUQCIv+dO+G9uvDC3GSwhoFh2L4djrPp05W64Ybk9pZgEMMpzqlSKNpAgJ/aWq65tJTXhZ9w8uTc07piMc3BOGVKZq4zuxw7RruRJAobG5PPEYuRSPT7uU63G2NeXk71LhLhPoWgvbiY9c7n3HY5ckR/h5WVJF82bOBepOVAEqz5VtMiEQz7oUMYuwULCPiPHmV982xvnugGqqCLRlmefpqk3NveRktrqjzxBG2IGzdSmZUhBXZ+0/Z2Wm7Ky2nBycZ5ZlkkAh5/nGLGu9+dP33Aiy+CGvR4lPr3f9dTCqXVzuEYmgQTmgWPR+s8r5dW6FdeAfny8Y9n3+OGwT3H49xzPgnPzZtZi74+Eh4335y+aCPty5bFcUe7fbm9Xamf/xzeyaoqdPg//RN6JxRCJ8qgieOp7I+gvbmgiyaA2NubjyMwSisHDpAQ3LcPv+Wtb9WJv3TTT594gmFE55/PforHsfnSTiv79NAhXquv1/QqmQp1W7eyN5YsUeqd78wfReLz0fJmmvhfdgSN+GY7doCgMU1svSQUFyzQ62jnJcw0fVkGrYw0kWgYekCLILHzPY7fj56QbpJs5+jt1Z0n0jEi1Dg+H4ieTZtI2jqdSr3pTdgUQY+Wlmrk4mgPbMkhBV1UkCQJhejYKiujjTlVLzQ3022xdCm+j1L44sXFOrYJh/FTWlrgTGxoYJ9nopp65hkKfQ0NSn3iEyADc+1Tr1fHaYYB+jBbMVSkv584pbhYcxcWF6M729rY60uWDPWLDEMXlhwOkqM+nx54JVQomXyJwUG4Gvv6iL3WrUvvTwndlRRopk7FVwkGOX5ZmR4eMxJUYjis1N//zlyARAI/8LLL0D2dnfw4nRSz8uHwta+PJBOdTq5rmDZzouuiCSGFJOL4SmGxx1HEqT3vPAL3cBgnu7ISo6IUhmPHDhzOVatIIBYVYZxSFVYsBuR+zx4SiGefnVspmiYO36OP4igvXUqLT2qAa5ra+RU0TUcH/0pFuqQEhS9E/VOmcC/ZnPVQiAqUw5GdeNj+/n37WJeammS0pl0sS/OFFBfj1EtS1DAwnrEYbQsOB9cQj+vWg1wBRk8Pbcs7d2IYzz1XD02ordUogNrakU1tVIq1bW7GwJaUkCiV9qQ5c4YOaUiRiW6gCrpoFKWtTal/+ReqrP/1X0MdsV27QNGtXAlaRJDETif7QZzIW2/l/d/8ZjLnaKqYplK33YZz/uY3K3XFFenbV1JFBkH95jc4yLfeOvRzhoGOKS5GT1mWplOwD3xqbgYd6Pczffq889irmRw9+wTmSZNy66LublqtN2+mui6BRjqRIVZj0b7c0wPCdNMm9MR113G/FRWaWF0p9ORoJS4NgzUXFFgOHVfQRRNExqq9WSmOuWsXRc5du7D3H/sYOsceQDc3g+RZsoSAXDoJTFNPN5XJpPE4z3VvL/bS40FfVFfr/WtZ7NEnn6RQe9VV+Qd7PT0U9EpLQWZnK0AEAhQrtm/nHhIJ9vrq1fhuc+Zw3lzTl2WoyXDW3rI0Z1lREdc7ku9OBuNVVuYuqHq9JApcLmxESQmB/333gfY0DHSuIA9lUILQupSUJE+DrqrKPkl7lKSgiwry/8UwSHRHIhT/U5/5o0dpM66pw
dZ7PPhB0SiJMZeLffcf/0H89PnP0y0UCOAfpYtNHn8czuTGRvid58zJvVd9PhKU27dzDRs3Zve/RHp7uYfKSny/QID919GBXzR9OtebSR9KYam7mzWaPFnHlJFI+kSiYaD/mpvZ42ecoYvAqeL3cy1Cd2XvQAuHuVa3W3e+GUb+Ra54nLV++GH00tq1+KENDaxDayv3UFfHdzFSn0zWSJKJw0BMTnRdNCHkDZ9EdDgc1yulfquUutCyrMfG+HRv7MUeR3ntNabTnX46KETLIjmWSFCZLy5G0W3fzu9r1miEzPTpQ6u3kYhSt9+OoTnrLN6fDqloF9MEvXLXXSjyc89FyWYzKAItHxjQZL2SVGxupupUV4fDXV2tCdsztR729mKEpkzJrsQti0RIayuKet685IEJdonFMJzRKNcnxMJNTTrYTSQ4XiLB30tLuafBQa41ExGy10tVa9s2rvctb2Hd3G7Os3Mn38X8+Rim0QjCBgZY264ujaoUdOPChRnXbdQNVEEXTUwJh5X65Cdx2H7606H8PG1tSv3v//K8vv/9PE99fZpjVCbl3XorTt03vpE87ClVEgmmzG/fzv5461vz4xQ0DJzrhx+mCJJterMgoiVwjUbZr0Kn8Je/kKyYNo1WoVmzeJ9wb6VeSyTCPpO9n00XmSZt39//PnrlYx/T65buvX4/7xvt9mWfjwnTd97J2l11lVIf/CBOvmFwv/ZhBaM9tMGy0OgP81IAACAASURBVHnhMOeoqsp4joIumkAylu3NloUte/55fKDBQfyda67BZh47ptTnPofv8J3vsF/6+thDU6fqooEkEOvqNMJPWmQF7StT2Z96ikTBmjXwlOabQGxrw5+qrob/cDjrEIlAb7JjB628oRDXs3o1wezy5Zl1m7Q158uPmEhw/5L4Pd4hOVKQqarKTYMj7c2RCIjMRx5B77z5zaA9p0zhXkIhvi+nk+uzD24R/agU9ysJRfl3lBHbBV1UkP8vr75K7LNmje78EvH5SCCGQgyQa2jQifO6OnSTYSj17W8TD/zrv3Kcvj6SjqmDSZTCt7ntNuKXf/7noUMZ04nEFTt3ogPz6S5TisRfZyf6S8ASPT3oJIeDwkauGFE6ugwD/zD1vJJIFFqG7m7WwucjObp6dXo9Z1nEM319fHbmzPSFlVAI/6KsDB/GXuQS9F86NPdzz1HMGBigsHvllZzDMPi+ZVDgrFkjH66Zej+STFRKIyZz6OGcumicdcspKWNAN16QgpxYaW8nyG1qQrk5HCi2SATjUlyM4tyxA6fqjDN0oNzQMNTohMMYpiNHqFAtWZKZyFfEsmgxuv12nLhrrsHxyybFxVxDZyfXJTxpsRiO5LRpmoy2q0sjb4Rc2B689/XpCVj2gQ3pxOcjwRoMYujnz89cyff5OLfDgdHzeFDsra0EBTNnaiTQzJn8rb0dYzJpEtfZ04MxDARYR5eLe9myhYDEsqhaXnABxzJNTbI8Y4aedB0OjxyFaJfaWhLDPT0EXv39rLcQLC9enJ9TUZA3nshEwY4OgvJUvdDfD3q5upp2YyleRKM4wcXF7KmvfU3/my2BGI8zJGrXLpKH55zDcXIFtaGQUt/7HgH3lVeSlMv2mZISzaPqdLJHystx8H/4Q4KDc86Bv0gC4bIyvS/tiINAgHsrKUEHZNNFhw+DknrpJdpzvvKV9CTsshaS0BjN6cuRCAWo3/2Oa3/rWxlWM2MGrwv3qww8GOngqFzicOh2RL8fh72ycuzOV5DxEZnWLEGRaY5ee3NbG8/K298OP+qTTyp1zz0UJpYuRW8UFdE2WFGhB7hVV+sEohQ46up04lC6ICor0XEyROAPf8BmSgtbPvdgmvgbnZ0kwRYtGv69u90kC08/Hb26bx/X8corBNkuF4nE1atBYtrb/CR5KNzSmc5tb48uKsrMEztckcKwz6d5GzOJy8XE5Tvu4HtZuRJub+mkiUbRRUolI6Hd7uQumXBYcytKO7Rp8lpJiU4oSnJxLAZRFeSNJW1tPGcLFgxNIMbjDDzp68PnaWjgWR4c1O3+lgXlwrZt+BkbNqAzysrSJxDvuQdKh9NOU+raa7HXuRKIwSA0NPv3c50bN+Y3yKSzkximtpY4yLIoiLS3o0vzSZ6FQoAxioo0FVSqlJZqehOhYPJ4QCBnGjYVixGrhsPogGx0V5I4FH+mrEzz4IptKi7GB7Qs/MdNm4gB58yB0mXhQo41OMh3Ho+j19N1841UBB1ZXKyvLZHIO5lYkDGUgqkoyCklg4M4tpWVGJLiYpJpvb0YKiGj3rEDpbhmDcGyz5eeoysQIJjs6SGpNXcuCjKbcpS2ot/9DuV7000YtlxiGFy/YWAk3W7N0+FwAFmvqMD42IexiIPodGIUgkGuoaaGQCDb+Q4dwtCXloIGyPR+08Roer0YGjsXiUxqbm3FiM6YwVq6XCREJMEohLozZnCc/n54F/ftg9sjGsXpv/hi7QCHw7xPWpdqarg3OVe66t1Ipb4e4yxt2t3dPCddXTjv6cjpC/LGlr/8hcT3hz40lDA8HKaIYFm0v5aX42DJoKTyct7zjW/wjP37v5PAzyTRqFL/8z/QKbztbSTZampyO2rd3ZyjowP+swsvzH1fwtMq7bRlZSQOf/hD9M/NN+Nw2503p1PTLciAJa9Xo4SyJTvjcVqsf/EL3vv1r5MIyfR+e/tydfXooAATCarrv/wl+v6ss0BBLligXw8Gk4cVjCavXSZxu9GlXq8uDI1DW2JBxliE4ykeJ/A73vbmvj6C0qlTddB+/vkULx96CB3Q1weaVqb/9vVh+8Wu9vejZ2prNSez7GsR2XNbtuA/nHUWqJvDh7WNzpTojsexrYOD+AyZ2vByiX36sscDJ+Cb3sT/9+7VPIovvcQan3YaxeJVq7QeEp5BmZSaep2xGK/lao8ershgg4EB9rMMZ7GLZZE8+eMf8f8WLlTqox/lu5UiuD2QzkYjoRTfn/BdyvoFgxqp6PPxLIjIABhJLOY6fkEKYpfBQZLf9fVDeQUtC77C3bsZwrZokaZPKC7G97csfJ0nnoAH+eKL0W1O59CEpAyX27QJP/2qq3jOcxUVAwH0YmcnemHDhvye8aNHudZJk4gJIhHu1ecjtpk3T/tAmTgGvV78MpeLzzidupXYfg2Cbnz2Wc6zfDmUEZmS/D4f16cUsVc+KEBJ2AoXttutbVMioXX2/fcTd02bhi5asYL3x+PEd4OD6Jj584fPfZ+v2JOJ8bhuwR7tCdMFyV8Ky16QU0YiEdrPDAPD4/FoBVdejvKLxXAu43GcSpcLpSsThe3i89GGODiIEZs2jQRiLkf/mWdogZs0CWWbg1tPKaXJtE1Tc3G0temJfvaqmrTODQ6iVKdP5768XqphsZhG/cnnU41jXx/Vt2iUY2fjDbG3L9fVaaJvuzidOpF45IhOYpaUgCSSpF9TE8q+ooL2gXvvxZlevJj2nFmz9HoMDODoulxD+RxnzdIoR0lajpbMmMGatraSDG5p4VyrVpF0Hmei8oKcpLJzp1K//jUonKuvTn7N
MECzDQzQViNtgQMD7JWaGvbVt75F8H3LLZn5/pTCIf3Zz3gWr75a0xnkcpz27YNfMR4H1ZduMmKqmCbXmUiw7+JxkNj33sve+OpXM6MDZdhAOMwxlCIYzZbof/VVru3AAVB/n/98ZqS3vX1ZJtkfbzLNNEmI/Pd/o09OP501k6mMMtAgEtHtgOOtAwQNKpxLQnUx2i3UBRlfEQ6/eFy3OI8EWREOU5CrrByamCst5bmeNo3iQ2srLc1nnAEdwsKFnK+/n2e8poZgMBDg8+kGAtx1F4HlpZdCNxKL4Y8I0k1ane30AuEwez0aJamXD+dYOrFPXy4rS94DkjAUNNKhQ/h7O3agw37/e4L8M87gp64ueWKzaWpeZKeTtRuL5JnwOff3a/5pSVTu2oX/2NJCkuKzn6W46nBwbe3t6MqaGtZwJAlO0WN2vZxIJE+CHhiguCXvT+VXzNWKXZA3psRiINbKynSiyS533UXh9dprSdwppYECU6fyrN12Gwm+q67C3+nuZm9Om5a8Hy2LQu3DDzO05bLL2Fe5gAWDg/gzg4PowHz8IqWIbfr78U+mTydm27OH15Yu1QlO4RiUicT2NZAup/LyZLSeZelEogyT2b6d/V5TQ7EmUyeHZZEM7e9n3Rsbh6cXKivZ80JJI3rv2DEStK+/zrq+970UpeQaeno05+KMGeij8ShuCppfZglIQrGQTBx/KXAi6p74i5VS65VSH1JKTVZK7VBKfcyyrJ2293qUUrcqpd6llJqplOpTSt2jlPqiZVn9eZzujb3YYyiGgYN46BBB+9y5KLaDBwm6Fi3CKdyxA+d49WqcXLsCtDujAwMkEEMhEoh1dVS3slVYLIsR9/fdR1LuYx/LrxIkiADL0k5tayvnnjIlMxw9keA6TRPD0dfHOkglT1AzUl3yeAh+Dx3CKJeXsy7ZrlEqZg4HBjzXNFXhgQwEeL8dUdjejoIfGID3sLeXdXrLWzT/iTjTg4PcQ7ZpkKZJci8Uwhina3E4XjEMnPlnn8WBmDYNIvPGxjHl/inoopNcentB41VVKfXjHycHVJZFa81LL+EEr1zJ3wcHeVYnT8YJ+8//xNn+1KdwEDNJMKjUT37C/nn3uwn68yHmf+YZ2oFqa0E55oOkFeoA2YvBIINiXnsNlO5HP5q7wi+8OEIPkSnBHwyydnfeyfu+9CV0QSaJxdAr0r58vG29gvYRVMTcuUyXPussrW/ica7TNLnv1InVJ0KiUQJ9pdDnNTUFXXQqyEinNxsGiadEgmA4NXi8+266M97zHgp1/f38f+tW9NcVV4DiMwydJJJEdeqk8UQCdNzevUpdcolOAohIm+7gIJ8vLmb/W5YOtpctG1nRL5/py9k+e/SoTii2tfH3piYSHWvW4GvF4zpAHY9CgRRspGh69918l5MnM/n97LP1c5BI6O9F9n9FhW47HwuRVkpJDgcCejCNy5XMr+jxKOV2F3TRG1nEpg4OwrucGlts3YpP8pa3YGsdDuxrby/+RnU13R2/+x1x10c/yrG8XvaEnb7IsuCG3rwZ3+T883kGc/EQdnWBWozHSTrOmZPffR05wh5taODn4EH8nMpKKK5Sk+qSlC8p4bosi6RcIMB9CjVV6mck/nvlFX5fvhyQhVLsR6WS/ZBYDN9QhpiMNJFnWbpAGwqRmJVBM6LrnU50jQzPDAT0UJkTSbMiXI7CdfuPZOJwOBHHQ7ecklJIIuqHaIdSylRK3amUciulPqOU8imlFliWlXA4HKVKqSeVUkuVUr9SSu1WSi1USt2slNqnlHqTZVmRbOfq6FBWeTnBX4HXaPTEskjcvfgifF+rV/P3ri6UdlMTSvvllzFIK1dikLq6SG5Nm5b8ffT2kkCMx3GwPR6SYdmSbaZJRezppzn+hz6UXyUoHtdtJHV1GNTOTj19ubw8+zQq08S4dXZyfbNnJ7dFyfTQYBAnurUVBTt/PoYpk6Nsb18WFGe+FR7Lwqj5/dzD5Mn87ZVXQGf19XH+yy8HMSAIgM5OPmcYnK+xMbcjL/cfCJBszTUte6QSj2NQH32U9fzOd8bUWR5zXaQKzvKIJR4HHdLaShIslcPwqad4Tt7yFlp+ldLIPAm2fvhDknwf+Uj29mKfj3N0dcFj2NSkW80yiWXhiN9+O/vrC1/Ir5ghgaxhkHhsbsbhj0bhvlm/Xre6ZJJYjCSFUppDLN3Qka1baVnu6iIx+slPZuc3DQZ1e1CuifT5SHMzycMXX0TXfPjDOMoSjMuwgliMc6UmU060HD3K8JmnnlLqT38q6KJTRezE9vmiKvbtY88tWTJ0nz/9NAOKzj2X6fEOB8FmZyc28/HHGcJSUqLUO95BUG0YvMftTvaLYjGSj4cOYbvXrs1+XcEg/lZrK5+ZNAm0UD5T5NOti7Qvj0Z7cU+Pbnnet481b2ggmXjmmRQUxqtY0N6Ov7ljBz7gNddAnSO+jyChZbiN+ITCS1lcDAJqNFuuM4kUp+2JxYEBEiqHDin13e8WdNEbWfbs4Tk4/XTNISyyezddDIsWMRVe+O2OHtXDHx96SKmf/xy+5X/7N577nh5svp1qyTDghn76aXTWhg0cIxv/n1LoonvvZa9ceWXuhKNSeuik18vxKytB5vn9xChz52ZO4kci3ENpqR6QVF+fWQd6vQwt6ekBGLFuXbKvJ3pQgCHSvuxwsN7HS+00MABS9Lnn0DMXX4x/KoXjRIJ469gxdNDs2bnnA4yneL0kiO+5R6m//nVYScTx0C2npJxEbvEJF0sptcGyrIRSSjkcjt1Kqb8ppS5SSj2olPoXpdTqf7xnu3zI4XA8oZS6Tyl1vVLqf7KdQAy/z6f568rLR3062htOnn2WYPCcc3QCUZJxtbVUuHbuREEuX47SE1RQXV2yo9zVhUOnFFV7p1NXWzNJJAKP1q5dVMTe/e78HFBJIEprS0+PJhaeMSOZ9yFTi5PPx3sqKrhO4Vayt+MIOtDnwwhOm8ZrR47oKll5uXZCo1EMk5CrD9dIOBwkVjo6WM+2NgKVgwc518UX46zLRDOlCGgSCQINqSgNDGjUViYpKuJckkgVNOdoiWXx/QYCXO+11+Lsj7GMuS4qyMjlF7/AWf7Sl4YmEJubSSAuX44uUEpznZaUsE9/8QsSiO9/f/YE4sAASbyBAaVuvBEnu7Q0u6OYSNCWu2UL+vDjH88vuDQMEhGmia77859JUs2axfTlGTN0S28mLsBwmPt0OtnHTmfyoJWiIs7xne8QLMybB3p8xYrM1zXa7cuHD7M+jz+Ozv30p0GL2tdICi8ygfZkaduTybsPPEBBxrLyb8M6ntOqgi4aN5H25tShK5me+aNH8SHSEfnv2UMBYskSUNNSrBPusSVL+Nzq1eisv/2NQSwXX6yDchGhimlvT0ZXZ5PycgqRwSABsbT/BQL4ZPlOUs/WvjxSqa+no+Atb+Gadu7Ef9u8WanHHkM3rF5Ny/NIBr/kI/39FHsef5zv4/LLsQf2rhhBBZkm34cdgVRVpZMT4uuO9QA4h4PvraKCaz5yhNbqQEAPWBhDKeiik1g
6O0kgNjUNTSB2duoJzJ/7HM+OZRHzOBz47E8+CQ/i2rVMYjYMzdlqBwckEhQAt21DF61bxzHSIfvsIr5ZdTUt0vkUVgUV6Pejv0wTMIHDAaI6V2zkdrM39u3j98bG9MVS0yQx+dpr6Jp16wBapOq6oiKOEwyy1qEQenbmzONDTgvycMsW1v2ss0B2NjbqQlYwqDvkJk1iPaSl+ETzpba2kvx85BFs1Qh0UUG3jFAKSUQtv5QH6B/y5D/+FVrY9yiltiulDjscDrvqeF4pFVRKna9yPEQNDXoSUiiEcvH7dXVREoonul1qIsnu3SiOZcuo3iqFEmxt1aS1r72Go7VkCUm0UIjAXDheRDo64OJwuZR617s4TllZdnTbwIDm0rrqKhzwfCQWw0AKN017O45yfX0ycbA9kWg3EpbFPfn93MfcuTxPMgyhqorrP3qUwNnh4P6Fn1H4OmQtBgY02XogwH3PnDlyglyBlP/97wS8kydT+TvzTM577BjXNnkyznQsprkfJdlgb//MhlKSSdGStDTNoeTLwxUhHpeWcCGTnzYt+/CLUZIx10UFGZk89hgE09dcM7QF+cgRgsKZM/VUeOFBFDL93/8eR/bqqxkckkl6e0kgBgK09EhSLlsbYCCg1Le/jbP87nejw/KxJUKLYFk891//OgmICy9kCIMk2Nxu9mMkMrSt1+/XrTt23h6Z2BwKsXb/9V96MIsMeMgksRjHVOr4pxJ3dVHoue8+7uOmm5S67rpk/SZ7XtowPZ6Tg3MwHCbp/Mgj2AmPhwTIJZdgz8ZYCrponMXezmwfupIaqHm9+Dl1dUN5l7u60AWTJ8MxKvtMKE+mTuW58vnwCzZsIIl2xx20Ej75JO20K1aw33/3O2z2u96VnbtVxDTRId3dJBUWLkS/+Hxct0x9r67OzO1qb18WrtXR8o0TCY5tWazRJZfguwUCrMNLL4Hy3bwZ32PlSgqfS5Ycf5tzIABS5uGHWaeLLsJ3LC/XvlhtLesuSOhME5NLS/FJhGdNWhrHKqg3DPzJXbt4HoqKSLKuWDEuiKSCLjpJJRCA77SmZugQyUAAjmGllLr1Vu3LDw7yfNfXs9/+z/8hjrvlFva50CjV1yfTi/zoR3SVXXcd+zKRyM5Vb1mAGF54gb1yxRX5FQZNk2c9EEC/9vcTs1RVoQfyAf8Eg7ozo64ufRzT26tbwGfN0lz90pqbupfjcfae18vaNDWNfL/HYiQOH3kEe/CmN1HMmDSJ4/t8XHNXF99HSQmD5qqrNWpedNR480SbJojJv/wFYEdRET75NddkL0xnkIJuGaEUkohaWu3/sSxrwIHmkhTSIqVUmVKqJ8Pn8wBG86B7PJojQRKKklSUaqtMxDzRGf6TWY4epfowYwZOmBiaI0dQbvPnUwHq6sKJnTEDg9PTgzK0Oz1tbQT4ZWWQx0YivMduwFKlrQ1Ukc+n1PXXo4DzkWgUw+J08tPayr+zZg3lHJRknEyhcjpRntKKXVurofHiaHq9BJudndxHQwNrYQ/ChauouprjBgIYzN7e5MmxRUXDd94HBzFKL77IGl5yCY6mTBaUc+7fz3c4bRrrbA/oxeD29HCvgQDfV6b2LkkkHj3KZ+wDaoYjsZhunbQsPblynNHC46KLCjI8OXgQZM+KFUp94APJrw0OEoDbp8IrxXMbi7FH77mHn0sugZ8sk3R10e4ci9HmW1mpBxVlQyR94xs4ev/6r7Qv5iOJhHZyW1ooiCQSHOPss5PfKy000lrndnNdg4MabVhdnXyNRUXolC9/GUfvjDNoaZo7N/t1jVb7stcLsvzPf+Za3/UuvrvUdiJpO1IKHXwy0I10dpLE2LpVT3583/tAmI414sgmBV10gsTp1NMvY7Hk9uZYDPtZVjZ0+mkwSCHANEFLy7MiRcbaWo2Odru1Xpk7F+qD3btBJf7wh/gklsW1XHstdjyXxGIUbn0+rk3Q2lJIqanB3x0cRPdIQbe6Wgf30lItNni0AlR7YlJQPaJbBGV35pkMD4hGuQ/hUXz6aa5lxQr02OmnD88viEZBYN97L7rm7LNJ1NoLnjU16P/WVtbjHzyDWaWoCB9PuCiPHcNXGk0dFgqRFN6zh/OUl7MGS5ZkL/COshR00UkoiQRJQKeTYWT2mDWRUOq738Uv+epXddErEtEJqoMHQSnOm4e+Kinh/fG4jhmUYv98//sg9m64gW4P4Y3P1G2RSJAke/11YqALL8xvXxiGRvrV1elkYlMTHIr5xEODg9xHaSk6UHwM0XHxOAjoffvYT+eem4zgTB20ohT+jLQvL1zI2sRiw49PDIPC5P33c8zTT6eobefNrqoilt69G/07bRrJVNGXEhumoubHOmfh9yv14IP40h0d6Mx3vpPrnzFjxIWmgm4ZoRSSiFqMDH932P59Tin15QzvGxzuCR0OjUCUtklp/7KPW5f3FBKKWrxeOL88HipS4mSKUzptGgqmowPnWJxhmTZnJ59taYHrp6qKFkOZUNXQkHnNd+4k6eh0wqe1dGl+yisS0cikaBSFaG9fTidFRZzHMFDW3d38W18/NJgsLcV47d3LZ1as0Oi+TCKJBLcbY1JWlsx7I4lvSWxnus9gkKD3mWdY67PPBh3q8bDuMn160iTNExKJ6Nb+VCkt5dq9Xq6vvZ3PZkNjybSzvj49zS2XSDI/GMQgy76Ulp0TIOOuiwqSXfx+AvPKSgJte1IrEkEXJRIkqKQQIPu7vJyWtTvuwFH84Acz76GODirtSpHIkyl/tbWZE2nNzaCOioq4xlQkQCaJxzUH4oMP0iY7Zw7ty5n2jVAkRKO6zT8WSz+B2TBAdv/sZ3zuc58jaM6GbjYM1iyROL725XCYgS233YYtvewy0Iep9yWIbMMgEDnRdtayQHQ89hj/GgY6+dxzCZpGigw/DinoohMoEqhJN4Jpspf27uV3GRgnYhgE7Z2dSn3taxqhKNQpwnNobxOUSaCGwfN15pm0FD70kFLf+x4+wNvelt+zFwyCUovFkqeVpor4tPE49t3rZd+XlrLnS0r08JTRQgNLMtayMvMqytAAaR+WKc6JBAm0HTtImGzbhm+wdCmvr1qVOZlmGCQy/vIXfLMzzqCYkTrhXvjOXC6uU64hX0nX3pxPy2Y26eoiiXD4MNc2ZQror7lzRxcZmqcUdNFJKLt2YWPXrUtOZlkW7cnNzfCxil9iGDyjLhf/fv3r2OWvfpX4wuvleLW1+nihELpo/354pJcsQS/V1WVGFYZCdEK1tbE/16/Pn9qlpUV3XLS0oBeWL8+fKqm7m71eUaEnTkuMX1wM0GHbNq5x4ULitNRCiR08UlTEnh4YQG8KX7wgqoXDNpdYFjrsnnu4xnnz8ItSu6sSCeItaTcXOop0vlFxcTJq3unMTL91PNLSAt/hY4/pdfvkJ6ENqqk5br+toFtGKIUkYv5yQClVa1nWY2NxcEEgikIUXqZQCIdPHEBxvk6GNqsTJdEoQXsshgIU5y0apXJSUUFw29pKBUgq9b29fMZe3dq3j2mDkyYx1Vmmz9nfYxfLwiG8/3
4+c+21+VemhDMskeD3RIJEZT5tIMIv1tnJdU2bNtRoDAxwP5EIkHNB2whKKN01SrXM6UxuX66o4F7lGZSkYlERz6jHo5Gy0ShomSee4Pe1a2nRsaN9BBV44ADGqbGRwS4+HwlCIQdPFUEveDwYNOFUykYkLuTKMvE6tdVLxDD0vRmGRkme6ERCHjKmuqggyWJZBOa9vbTj2qeAmyYot95eChDyDJsme6u4mNabX/8aJ1u4ydJJaytTmF0unG63m/1UU5MZifP440r99Kfogy99Kf/21lgMfdHXB1LvwAHa+T7wgdyoH5dL86aWlKR35HfvVuorX+Hf887j2mpq+Fw0mj44Ho325XgcB/mXv0SvnHsu7eCpaC0pHEQi7PWKivEZTJBJQiGQTlu2oOPdbvjaNmygkHK8XJBjKAVdNA5ib28+eBC7uXjx0Knwv/gFyedPflK3Hdu5x6qq2Beybx0O9l0sloz48/nwJc47jwLsiy+CJl6/HqqGdLa6vx/ET1ERQXs+aFmXC/9n0iTO2dXFcUpL+fto7EnTROfYeaJz8SybZjLXV3ExrZbLloEGPnBAIxR37mQdFy8mQbh6Nb6PZdFu96c/cV+LF1MYSofmjET05FVBaw4O8pMNgZ4q0t7c14d+j0Ry80qniiRR9uzBrimFb7hwIcc6GSbUZ5CCLhpnOXQIe7V48VDap7/9Df/kne8EPS8iBf5wmEJHTY0u0ArIwuPRYIFAAKRiayt67bTTOIZMBU8nfX3QxvT3Y0OXL89PlyQSPPuhkOZkrK4maZmPP2KaIIEF8W3Xk2Vl3MuWLXoa9UUXZY//nE6OdeQI1yN0V7L/hFsyFsvsVynFewRh3tZGTHTzzaxL6l7u69PnmzmT6xNASWqniUgqKtEwkgd8jlQEMblpE3bN6aTIddFFepDYOOVCCrolgxSSiPnLnUqpbzocjvdZlvV7+wsOh8OplKoezTHfpaX81NaiICSZ09+vnSxBho03F8GJFAnau7txHyhfXwAAIABJREFU5mS6lmVhZAT63dKCohSHzedDgdfWasf79ddph54yRSMQo1GOmU4ZG4ZSd98Nt8acOUwzbGzMz0GT6ayhEOeQyVb5IksCAQIBp5PrtScQJbDo6uLeVqzAQEkiw+vVPIliAGQasiAhZdiKXRyO5Nb7SETzKAaDHKO5mcE20SiImUsvTd9GLFxj5eUcxzBYY+EJ7evTAxnSicvF9+n3Y4CPHOEea2vTG7UpU/hepLXZDnOPxVhPaZeSlq4JNOBoXHXRG13+8AeC6E98AmdZxLJA7x04gC6wt+gODvLctbSAxFu+nGmDmRyegwdJBno8Sn3qU5o8O1MyzbJANt51F/vuc5/Lv61MEoivvAKaWimmTa9fn9/nBWGp1NDW30iE+73tNvbyD34AGtmud2Ix9qZ9+qgg8EfavmyaoA7+539Ac65aBXIh3eCReFzrL7GjJyoo7ugAvf3ss6xdUxN8maLDKypO+oJGQReNkzid2MnubooFqYj8TZsInK+5Rg91Uoq9HovpxFRxsU4gGobed2L/enuV+u1v2Sc33ojtfOc7QSv//e/4P+edB7eYIN2OHgUl5PGg64ZbALAszWEt3KSSRKuo4NpHMuBIEqQOR/5t0eJDWhY/qbqhqIiE2sKFcM+2temE4u238yNDZWIxfNBbbgHBl3osw9BFTJcruYBZXa1RmvbCVS4pKiLR4Pfz3R89yv9zfSeBAAjX/fvRRR4PiehZs/gOJgC9UkEXjaP09fG8TJ1KPGSXZ5/Fbzr7bHSHiN+PvonFoF8pKSGBWFurqaZcLo3483qV+ta3iG3+9V/xv7q62CeZYoX2dpKXsRhFxHnz8tNH8Tj+mter6YxmzSJOy5dbuqMD/2jKlKH6+dAh/Ei/H79kzZrc+8nr5ZiWRUIvXReW6DTRc6nJ0kOHSB7u3cu63nADQI/Uc0ejeohMRQX3LjahslInEu1xZKoUF2On4nF+RK8N17/yevGt778fPVpXRxfLOecQM2bi0R1DKeiWDFJIIuYvP1BKXaaU+p3D4bhUKfWsAuI6Tyl1tVLqS0qp/x2LE0vrRU2NntaWOhBDEIonEk0xHvLgg1TIr7giGYJ97Bhr4nZjCBoaqFRI23B/P+sjztjOnUwfnTkTDkRpZ62tTZ/YC4UgF9+/n2rzOedkhnen+6y0WcsgFfsEvlwi37PbjXGStubiYhTsgQP8v6kJxS/XVFTE/UgC1TD4v6CI4nGcy2yDY0TsSFkhKr7/fox+YyPtTvPm8bpcm1K6VToc5tlcupRrOXYM53vmTJwQ0+ReioqyO8yVlXw/vb2sSTDIPaRLANbXczwZtlJXp5OZ9gTpCWpZPh45YbrojSYvvogzfOGFJMjt8txzvH722XoqvFI8Y5EIDtmPfwwq2D7cIFX27FHq5z9nbwoC0efT3LipEotx3KefJkH30Y/mr0uiUfbs3XczPGH+fNqX8+UPFS6z4mL2biymJzY//zzIgiNHSGT8278NRQq43bptT/SUtC8Lwnk4Imifn/4U3bxgAe3g69cPdVwlWRmNsl5VVSdm75smCdzNm0EHCO3E2rXoUo+HazvJA3aRgi4aJ5GJnPX1BO12HqoXXiBxf9ZZdEeIyPAU4TJ1OvXgDdkPMthPKWzlb3/L7x/8oNYLHg9B3AUXwOm3ZQuDRy66iMC+p4fjLlky/AKAJPqky6GoiOc/kUDXiP9SWkrwWFmZe2+IjjHNkQ1lSUUjZvqsw4HPNWsWvNzPPIMuf/JJPjt7Nt/P4cOsz8yZOkEp6EOhi0n13YVzVobRZKNxSSdSgOrpoWBcU5P+GMeOoYeOHOF8U6dyP5J4nACdGSIFXTROEolgw6RoYJd9+/BPFi9O7ryQ4mUoBLdhIqHUd76DjhG0tGVpGqn+fhKIfX0UORcs4DlO5bO3S3MzurC0FCT/9On5gQNiMeLGri78A48HXZbKnZxtPY4eZc83Nib7bYEArcudneypjRv1wMxMyU1BNA4OapCH6KR0e9HlYu3icf5fUsLn77mHTpjKSgoeZ5891OcRmq9jx/iumpqGIs1dLhKLMkCvsjK7Tiwp0XFqNJrM55tN9u2jGPb449zL8uVc9+mnsw7ZunLGWAq6JYNMvPD5BIllWRGHw3G+UurTikk9VyqlIgpCzjuUUlvG4zpcLj0QQ9piQyFdsSwu1gnFk4EcfjRFJmy9+c0EXCJ+Pwkoy8IRqqvTMG3DQEE6nVoxbt/OlM45c3C4YzHWrrIyvZPV20s7Ym8vTvTy5emRe+kkGNRcFmVlGMx8p9iJYQ0EuLbJk7XiDoVAL8l1L1qUPgB3OLgnp5P3ynRGlwtjMZzqvmWR8HjgAQzOjBkgOGfP1uhEab0vLdVtRC5XMj+PTG/t6CDRMmsW6ynoSAkiMomgMSsrWZ+ODt6fbiphTQ3rd+gQ19zUxN9OJProeOVk0UWnuhw7RhvN3LmgEO3Py549DA9askRPhVcKnezzkSD/yU/YI7femtmRfe01WhAbGnQCcWAAJyzdHhgchP9w3z7oF97xj
vyf40iEz/3f/8ueufxy0Nz5OmX2YL62VgfXnZ1K/cd/oBdmzSIJsWZN5uPIxGZBa8p+H24BbNcu1vjll1nnr3+dpEY6J1sGJlkW53e7x3//BwLQPjz+ODqytpYhOytX6oTxiUpsjlQKumh8JJEASVJcTDDtcmHH43E6Kn7wA5Bxdj0l3GP24HPyZO23hEL8vaKCz3R0UCh1uaA1SOen1Naid976VgoRt93GcS+/nLbB4SQQJZEmVCKpib7iYq6hrg4fT6hXenu1D5xOd0WjulDodo98P9n5EXPd15EjtC2/+CJ7+PvfZ1+/+ioIxfvuI/laX0/B4LTT8Jvc7uztwWVlnF+GLg6X47CkBN+qv5/1i0ZZT8vCf9yzh7+XlvJczZzJOYUfdiLRJhV00fiIaWJzDYPiqX1/dXfjn0yaBPrW3m0gVEQ/+Qn7+Zvf1EOX+vt1F5jLhV//zW/yvs9/nmKnJLmkyyj1ml54ged50iQ6ESZPzq/TKxqlAHnoEM/9lCn4dfn6IwKKKC4mvpDPSQvxrl1c79q13IfDwWdCIY3cs0skgj6JRpPbl6VN2D5oxS4lJZyzuxvE+LZt7OsrrlDq/PPT+6ChEDFYKERc1NSU2R8sKdFUYX5/bl3kdHLfqS3Oqd9dIoFftGkT6+V241Ofcw7fhQy3PJH5jIJuySwOy7JO9DW8kWTMFlvaUkIh3aYpAytOVNA0mrJ3L4igxYupTIgiEufa50Ox1dZi2JxO1qCzE2U8fTpK8LnnIAxfuBBya0kyut0YsNQ1amnBsbYsAr7GRiq1+QTefj/JBCEATq1QZRO5rkgEoyjoPMvSrUOWhVFqbMz93cq0saNHUf6nnTY8pXzoECjQlhbuRYLf1PPG4zilHR064dDQwDlTq+0C1Xe7NYKyvZ3neMaM/NozTRMHRBLokydzHmlZlomr0SjfgxjKUaisT+DdpJQaQ110Kkg0SvtMdzdOr30gx9GjFBWmTEnmEBRH+fBh0HDV1TjCmZC1wpU4YwZcP6Wlemq7DDywS1sbiTKvF5TfmWfmfz/hMITUt93Gfvv4x/OfJm9Zml/LzlVkWejSb32La7rhBqU+9rHcesWy0I39/azd1KnDC1ZlivSTT7JON97IZL50OllaI+Nx9IPHM/6BcXs7a//881zHokUkXObO1UnNqqrjqrAXdNEpLrt3s8eWLk1G93Z3gyQuKaHgYSf+7+ri2Xe5NOewBPyRCDpOEkZtbeiGsjL2cS4ETjRKcHzgAOgfGXp25ZX5JRNHOn1ZeKUDAf4v6JTyco4ZjWp0ZknJ8fu8lsXxHI70PkNvr0Z1u90kUy+9dGjA7vOh759/nsKRZbFea9bQ2bJ4cfY1k8naHs/IpyEHAnzPLS0UMZTieZk7VyNORUceRyGjoItOcWlu1sNK7BzMwaBSX/wivsK3v508abi/H131s5+RcPva1zRnayCgCwO1tbz+zW9iK2+5BbDHsWP8f9q0ocm9WAw+9o4OEvMLFmjEci6JRNiPBw6wHxcuJBbJV2/093Ptbndyd1l/P0nNgQHiszVrkmM/QRg7HMmovoEB7tXp1F0J9s8kErw33f4MBonRNm9GZ513Hvoonb4wTfzYri505cyZw0NdBoN8D/mssZwvHuceBJXY369blvv7Wb9LLiHZWlzMdY2UxuIfMtF10YSQQhJxfGVcFltIa4VnyrJ0q0iuCbsno3R2QpQ/eTItNnYj0tKC8fD7tVMmClaSS/X1KNInn0TBLllCa45Axp1ODRe3y44dcI7V1jLds6oKo5lPhWpgQLcZNzaSxMw3eI3HuWcZvCKGJBgkYSr3Onu2dsCzfZ8CtRdeROH5mTQp970cO0aioLkZg3HRRSQf0t2LZWlErKCLHA7d3qmUdlLLyzG8fj8BSGmpTiS2tREM2Ae95JJoFIPo8/F/CcplyrLTyWuStGxqOu5kwgTaQWmloPgziGUxQGXLFhB2dtSzzwdysKiIqex2B83rBd3xox/xjH3jG5mnk27bxkCTOXNI6EkCUYLL1GfzlVcY7lJaCrIxdZpeNvF6QR8++SSO+2c+k/m6UsUwuK54nD0l93vsGAnNp54Cmf35zxOM5mp9s09fliA/03T2VDl2jPt44AHe//73K/We92R2MiMRXUSQQtp4iWEwxXXzZtCfJSW0WG/YgB5NJHR75ihQkBR00Sks7e0gU+zJHqXw777wBT3hVOhVXC7NHSxJ6vp6naiTieQlJXr66O238yx+4AO5ESaBAAnERAJfqq6OJOef/0yxcfp06AxWrUrvl9jbl93ukRX0EgntayQS3KckFMvKRrdQkC6R6PfDM/b3v/P3iy4CFZ4pqBZec+FZ3LcP/3LXLnyX8nKKsmecwfCWdDrB50OfVVQMj/bBsnTL8uHDfH/TpqG3p09n/UQHF3RRQRdlk44O0LVz5iTzQxsG/k5zMwOYli3Tr4VC6K///m9Qb7feqjsVYjGezdJSdFt7OwlIpUhIzpxJ8jEUIhZK9RMCAXhgfT7OOXUqezAfxG44TKKvvZ3zrFiRfyJNEH/SCSbDHBMJ9vSePdzT2rUabZkqiYSeSO92E6N5veztxsbMgz0TCfSQ6LhoFD/jkUf4ff166Heqqzl26nGkUyYaJaZubBy+vpT8Qmnp8IoasRjI+fvug/rBMBg4eNll0GFJ63N19fCpbdLIRNdFE0IKScTxlXFfbGkZER5FcYbsCcWTme/E74csXymlPvKRZCetpwdnrLsbA7N2rXaCgkH+XlVFUL55M5DpFSuolotjZVk4VHZFa1ko5M2bcdwvuojXUweaZJLOThKILhfBvh0dkEvCYZJhDgeGSdqCW1sxdsXFHLOhQXNgOByZK/lSKSsu1vwgiYSekJap0tPfzxrs2ME1bNwIn0YmJzMS0ckGj0e3LIvIJGR7YlscV2nbLinRJMZtbRyrqSn3mhuGrtQLT6IY4tT2dL8fh6a0lGMXKu4FSZX774dj733vU+q66/Tfo1GQgwMDoN9kqJNSPP8HDtDGphSV9ExTwZ9+mqEoCxfCZ1hSwjETCZzY1L388MMkz5qamHKcLx2CUlzT976Hg3r11dzTcIoZ/f3oidpazWd4xx1wHikFgvLaa9mzoRB/y0QVEI1qBFFlJfcdj7N2LlfmfT4wQIv03Xdz3He+U6nrr8/MESbDChIJjuvxjJ+N8/l0y/LAAN/V+ecTNCUS3K9QkoxiUrOgi05RGRggIK2vTy4cGAY65tVXlfr3f8evkfbmeBzfJxJh39qnHEtrrPDw7dun1B//iI9y/fW5A8LeXpJRxcUkoezvtywS53fdhQ80bx7FWkk25GpfHonE4/gyg4Mc1+3G5xtt7izT1FNQH3qIIDgSgXft6qsz62TTRC8KEjq1RTgWI/GyYweFIknuLl9OQnHFiuTEidfLeYUbOpvE4+j/PXs0L+aiRSSApMBeVja8Ym0eUtBFp6j4fHRy1dYSa8nelanwjz4KB+LGjfozhoEv/9Of8ix+9rPwtiql0XBKEYO1tsKRWFJConHaNPSf10s8kZoY7OkB4W+a
JKLKy3UhIZ97efxxjr1kCZx7+SbQDUNz8E+apPd+ZyfF4UAAXb1yZe5jCrK6/x8jORoachd4TZNrsCwG2DzwAPezciWFDOmaEV3rdqNzEgliH6GbmjUrfyRhOpFYLhN3t11iMYrYmzYBhHG7Nc94TQ3HKSrCLxJ6jVGQia6LJoQUkojjKyd0sS2LQE6SOcKv4HbrhOLJxIESi2kuwhtvTIbOh8MMRzl0iGTR2rU6KIvHMU7SKvfIIxi/NWuAdyulCXQlUScSj8Nts3Mnx9ywgXVqaMgPVt3SQrKvspKW4eFAsQU9INddXIyB2beP+50yBcfc7hybpq5M2RNihoFRCwRQyqktg4mETlxUVGhjEghgmJ99lmfj7LNxCjIZCdPkOIEA5580Kfc9C1JWJj3Lc9nXx3UsXsz9tLby3qam9C2SkpQQlKMMZpCpzGLg7CgMucf2dgz8rFkjTiROdANVUPxpZPduHN3Vq2m5sU8VvuMOHOH3vndoQH/gAEjBWAz0Yuq0QpEtWwiyly1T6qabeC4lMEzlfTFN0Ir33ktA+ZnPDE+fPPIIzn1JCZ/NxlOYKpGIHgQ1aRLXuX+/Ul/5CpX2s84ieWFPlErA7HQmX6dlaTSyyzV0MEIsxl4uLU12ukMhKCxuv53PXn45diDTEBhJUoTDenDSeA0aO3wYvbltm0ZoXXABuszv1xX2qqpRDdhFCrroFJRIhL1WWoq+kD1jWRQVHn4Y+oALL9SfMQyexd5e9snUqdovkn0oPIi7d6OLpk4F1ZvruTxyBD1XWUmSK9PeMgxQJps2ERwvX07hVgYoDKd9OZPYE5JOJ8eMxXSrs2XpQXqjgGhRiQSow7/+leOvXQutjr1lM1Wi0WQkdC6aB8MgwN6xg2Ts4CD3tmQJ9mj1avSHcBtmKkR4vSQOpRNm8mR80cZGPicoUCm2OJ28Z5SKGgVddApKPM6etiw46e17/557oEK46qrkoqtSJNt+/GNahj/xCQAZIkLXNHUqvv53v4teuvVWPV1cYoJUIMahQ3RBlJcTo8l+zwdJ2NmJH2YYoPbmzcs/aRWPg8aMx9FnVVXsqZdfJvarrKRTy15gzib9/XSvKMUezQfVJ8Pk7r2Xzy9ahH6dO3fo+2S4VChETGwYXHe6zruRiPh1kj9IlZ4eCi4PPoheamqCembjRs2L7XTyvdXUjHqxd6LrogkhhSTi+MpJtdj2hGIiwd9kGlt5+YkleDdNpe68E6fquutQlPbXdu0i0dfYCDeYOMBS3TJNFOVDDzFIZf16pS6+GGMhBL/19ckOpt8P/2FbG9wMp52Ggkx9XzoxDGDafX0o6YULh5eQFeJrGb5imhilY8dw7hYuzGwgDUNX94uKcFzFYNTXZ/6caeoEhsNBJXzrVgzkunUY/GwTAWXqtGHo6v9wK0iWpROK/f0k94qKuF+PByNUVESyTyaQhUJ8f5I8lSnLqevt8/F9WJY2UvbBNG1trJkce5gy0Q3USaWLTgYZGKC1uKQEHkS7Q/fgg/BZve1t7A27tLbSguP1kmSzt/nY5aGHcPxWrYJ3rLhYI2hTW9QiEYYlbNvGOT/wgeFNYP7Zz0hqnXYaLY/DQS8Gg9yLy0UCMZEgafHrX7PPP/959GO6vR6Pc/6SEj2hz+fj32zTlyMRPiuDBP76V6V+8xu+k40bQWzOnp35mqVF0zC0DRtryo5EAtuyeTOBQGkpAdb557Pe0n4o1A4ez5hdU0EXnWJimgTe0ShJOHuC57772BtXXknyzy49PdjQ8nL8CI9H050IIs7jAf32l7+AQnvf+7InkCyLAsLRo/gTufj7RGIxgvVNm/Ct1q0DRWznlx2JxOMcWyl0TKrtNgz0l6AThVtrJBPPTZPkyZ//zNouXgyFgt0fTRXpuhAk9EimG1sW/t+OHeiYnh6+wwULSCbOn4/NkMKTZZHc2L2b76moSLecTpqkp9JLF5JwpMfjHDsex9fLB8WVQwq66BQTy+I57O0l1rI/Iy+8QKfD+vXwNNvt2+CgUj/8IT7MTTeBkrO/NjhIcrCtDfqYujpamCdN0h1ZZWVDueplWFFDA+cNh3me0/FIp8revQAkysu1nc5XJK5SiuJpWRkFmx070EdLllDsyUc3GgbH8vm4Fpmkng0ZaFno7b/9jYLO9Okk5E4/PbN+kaExg4Osz5w5x8UxmFYCAT3RWibKv/Yaev/pp/n/mWfy/a9YoSc8K6WprZTSVByj6CNNdF00IaSQRBxfOWkXOx7XCUW7gyYJxfEeq/7wwzhvl102dIDAwYNUoerrmeCUSjQeDOJAP/QQicZzzyUQdTi08aqtTU6QdXURJAcCVJinTmUtJk/OXR0KBkkghkIo6aam/O9TuDWCQT1huK8PxR+L4eTPmpXbMCUSOLx2NKO0L2cT4dN47DEC+XXrgJhnq6QJT1ooxDNSVzc6iB/L4rvZuxeDPW0a31lfH/fR2KinJbpcfC+5+D1lQmUgwDU2NGhEQDiMAyNJymHew0Q3UCetLjoRYhgk2/bswfG1V3VfeIGWkQ0bKETYpa9Pqa9+lWS/tBWmimWRPHz4YfbXP/8zz1wkolvK7K06fX3wCx0+rNSHPoQOzFeOHGHQyeHDIAOGk3xUiusJBtlvtbU4yF/9KonSK64A0ZjP0AWhWZACRUVF7v0VCKCzb7sNtMDatbRHCQF7OpGiQjSqCwpjbau8Xojcn3iC36dMISARhIbPxxoWFWmU9xgnNAu66BSTAwdI7px2WnLQvm0bLX9vepNSn/tc8nMVDOI3uFz4IaWleh8K2t/tpgB7772857rrsu/LRALfpr8fv2bOnPyfZUHCBAL4GEL4f845BL/DTViZJvcg6MNcwwIti3MPDnIdRUXsxZqa3LrIskAX/fGP+AizZ0PbsGyZ5pRODdzlfkXnjRLHoLIs9LogFNvb+duUKSQJGxt1YF5eToJzwQLWR3SjZemOo9Q1syy+30CA99gneI9ACrroFJP9+9FHS5cmxzYHD0KvMmsW3Rf2Zz0SAYG4ZctQWhhJEFZU8Cz/6Ec8y1/8IjFZPK6nHU+dqveZJPQPHAA9eMYZerJ4XV12XSAFv9dfJ2688MLhIW/9fnwSiavicSaxHz3Kud/0pvz1WTjMfo7HiUcmTybWCwYzczcfPEjycP9+rv/tb+f+pZswFfAjFFEdHfxeV8d5xoq+TPTP9u34yi0tfL+XXILfOGWKHn5qmnpAn1y3UHEopQevjIJMdF00IaSQRBxfmRCLnUjohGI0yt+kojpajlE2efFFnNwzzxwaQPf2oqTKypR661uTE4FeL85QdTWtJ6+/rkfFK4WS7ulBudkrUPv2KfX733OPN9yAEg8E0vNwpEpPj56UvHjx8PgPpeU4GuVzbjcGsreXa1y4MH/OCuEeCQa5brvxTSemiVP6yCMkLWbPpnVZpnRlUuJ+P4bbsjCaYxEgR6MkQaJR1kEg/04nDvLUqdmvMZ0Eg6xrIsHzIZyNkQgJEocDZ2gYE6snuoGaELpovORXv4Jz77OfJSEksm8fLbWLFiVPhVeKZ+q
rXyXp/cUvpp+WbFkcd8sWWoCFPzAWA2VXUpKclGtpIYEYCpEkWL06/3t4/HEQiE4nXIVvfnP+n7VPYBZOmh/8gGufMQOE5fr1+R9LONmqqthv2XSRZVGx/ulP0aULFyr1L//CembTLTKswDTRnWM5MEyQQY89hn0yTRAA55+vEwuSPFRKJw/HiYuxoItOIenspF1v5kwSRCItLXrYwDe+kWyrEglQaPE4CSTxGySxJZx4zc0k6hcuRJ9lS7hLO3UoxPuHgyC0T192u7HVXi9+3RNPoKMuuoiCZT7t/TKMxeFIPywgl0Sj+C1+f+5W5z17SB7u2YOv8a53JesiGbRSVKT/Jj6zYeji+1jpou5u+MX+/ncQP0L5ct55FLlmz9at1NI+ng8aMhjEF3Q4dMJhBFLQRaeQ9PSQGJoxA3sn0tvL5OSSErow7Ak004THftMm+EJvuil5nxw9yv5tb8dfaWqiu6GiQvMNpnLVR6P4UJ2ddHKcdhrPqsvFs5ptr/n9dJG0t6Mb168fnv7o6+OnrIxrOngQcIpSFI0XLsx/r/f1kUAtLka323VfMIiOq6zU13f0KOu4cye+1Nvehl8nrws/on3QSjhMTBMM4ns1NbFOQq0w0mFWmaSrC71+773Ymfnz+d7PP5/nIxBI7kbJxFcrHP+iW6Wz7jhkouuiCSGFJOL4yoRbbPtADPuEXSFTHe2JlwcOkNBbsICA265EIhHa3OJxlKk9YRcOY2BKSgim9+2jCiKBbySCspMpYKL0n3sOJT1lCglE00Th1dRkrywZBtWkjg7OuXjx8KdUdXZqjorBQYIEy8IJbGzM3zAJzF4GM0yalNlICiT+wQdZj8ZGHPn58/XEQ0kQpnJF9vVpNENd3di1u0tCY88eztvYiNGRVoL6ep6LkhLdypwP+sg0uQefj2uvr+cZjkYxupZFIjHPZ3qiG6gJp4vGSp56iiEFl18O8k2ks5PkYl3d0KnwiQTV95deoo3HTiYuIpQMTz/N69dcw54WJG9REftVdNy2bQxmqaykwp+tfdcukQjT6//+d/Tmpz41PDS0fQJzdTU68VvfYq+8//3wruUbUEr7stBjeDzZk3svv0zy8NVXSY585CME7DJ0Kd3nhOMnFuN9Hs/Y6aJ4nO9l82YKG2VlJIM3btQcb1KFlwmxVVXjzi1c0EWniPj92OeammRahL4+igpFRXCH2QsPMu3X62X/p74WCPAcb99OAm/5ctqKsz2jPp9OUC1bNjzUYK7py93d+HHPP89+uewyir3pitORoOunAAAgAElEQVSGgX02zdEZxiKtzjLVWYYcVVXhy/3xj+j0mhr09XnnpV8nGbQiaGtBQo9l145lkQjZvRvbJFxifX0kNtrauL/KShI+a9fyXQ/neuztzSOkqCnoolNEQiFaf8vKtE1Winjj1lvZx9/+9tDpw3/4A1zOF1+c3OIswywTCWKdX/0KffXZz+oBi52d6A47V73Px9CWQACgQ2OjHhY5eXL2RNORI+xnr5c9sXx5/okpuR6/n71QWoov0NcHGnHt2vz5Vg0D/eL3sz9nzBiqV6QQqRT777776IIpLWUtN25MD3KQ7iyHg+9EdMPMmcSCIqaZzB9/PHrUskhsbtqk+fM3bECPL1qETo3FtJ4tLR0aU2YSw+AzlnXcqMSJrosmhBSSiOMrE3qxJXiThKJM2LUnFI9HMXV1EQzX1tLGZ1c4hoFS7e4mOWg3XFLdSiRwkltbSQjIIAF53enUhLKmCaJx61ac9fe+V0/KkonOmUT49Pr7UYzD5ZkIhbgPaa9pbUXZ1tRQ1RrOsfr6MKglJRg2l4v7dTqHGqmWFu758GESaJdeilGVdifT1DyJiQTrUFaGYfN6eV9t7fCSpcORRAJHQYatCH+l8BYK748McAmHk5Gywq+Ry1BFIqx/PK5RqYbB92AYnCuP72CiG6gJrYtGS9raQL3Nng23jzgsfj9DSZSikm5HJFsWgfzWrQz6sHP9iJgm/KrbtqGvLr+c59k00RuWxTPsdPL7ffcxgXj+fNBG+RCEy/V/73s8u5deStvQcCbuSXHAsth///VfVPwXL2awzJIl+R8rEqH67XBoFF44zJqmJub37weF8Mwz6KIbb2SNiot14UoSiXYRHl+lMrf+jIYMDFCMeuIJdNK0abplWTjIgkF0o2nqlvTxpv34hxR00Skg8TjJ9KIi7LLookgEndDZSdA+a1by59ra8G+amoZOhA8GOe4LL6Cvli1DX5WUZE4idndTwCst5TryHQRkH3bicuW2w21tIJ1ffRXf58orSdCLTozFuPaiIo41mol52b+DgyQaHn5Yo32uuoqgPR8/IhTSwwmP1//NJMJrtmcP1+zxEKgvXMg1Cnqrt1cnGffuRS9VVoLcOuMMdHk+Abm9vbm0FP9oGIF8QRedAmIYFBMjEWye+MOGAZ3CK69Q6Eylb7nvPtqYN2xQ6stfTt6zQiu0ezeJxiVLlPr0p/U+6+nh+W5o0DpHhqA4HNjf2lqOU1SUve0+kWC/HDzI76efjm+V7/4UzsJwmHMePUpnW0kJcWWqDs4m4TD7MpGg8JitW21ggLbl55/nXIIuzpWsHBjAB5RBSo2N6fesJBJFZw1XX4XDFFQ3beJ81dX4nZdfjh8nideBAU2fUFMzfD9N/FFp13a5RoRKnOi6aEJIIYk4vnLKLLYMxJAx7wJBloTicCsdgQAQeNNU6sMfTm5TtiwMyf79tCbbA1upbvn9BHzHjuGMinEzTf5mGASCLhdO2Z13UvE/6ywUYCCA45Ta6pwqPT0kO8XYTZs2PAXp9RK0l5RwHR0dGMJ585KnT+eSRIL7CoVwfKdM0UpW+BEFDt7RAfJwzx7e+9a3ws2WqpTt1XWvlwBZpqZ6PBjTsUDYCG+SnfhbJqsmEiQ9YzGCJBmc4/FgKE1TT3kWuH5xMc+gEP2mE0E7Dg6yDtJOLoZ45sychnuiG6hTRheNVEIh2n4DARJa4tzFYgwu6O2lmGHfl5aFnrr/ftrcUgcbKMXz85vfgLJ7+9s1j6Jwfsbj7CWXC730y18SxK5fD4own2qtZeHM/fKXPO8f/GDygKl8RCYwK4Xu/PGPubaPfYz7yjdwFLRTNDp0+rKgkmQia0cH6/fII+ja668HFZW6T+NxPc3Z7daTRO3DCkZbF8kAicceA71gWdiRCy6gfco+lEnQlqWl2KrxmgKdQQq6aIKLZRGkBgIk+sT2mCZB+/btcK6uWpX8ud5eujcmT06eGK+U5ufbupUW/DVr8HXEP3A6hya9W1tppa6u5jryTYonErqgN9x24z17SCYeOICuffvbSV4qxflLSsYmOef1ErA//LCe1LpxI2sprc65kNDC9zoWxYP+ftampYXrmzqV4k5Tk74uoXSIRnl2ysvxSxMJPYBi506eA7cbfbZmDeuby86MsL25oItOAXn1VWz1mjUkh0R+9SvoED7ykeSp8ErRYv/Nb/Jsfec7yXtCJi1v386eW7mS4q3YzYEB9qOdq/7AAYqMlZWcq6xMDxiqr89s/30+dGlnJ3
7g4eNJxShw7xrDIzydR1ufhb6Ve0I6jLRVD1zTf5e+5cbKPhww0YaRevF4d161ZjN40bx0sd++7cD+Xv0mwdDdKmpBieRo/HEO5HumfaVMnOi6rZgD2Rtjbua00N2T01NaYpXHIya8aECdz/oqIjgYJ4nEO8RKsllD823nQKfSnRBlpvu+1WefHFhTJ//i1SWTlVWlqaZf16bKN7771RfvzjffLmm3+UiRPvlvLySnnjjcGti+IpgQDVDs3N2B/Jycz3zZuxf15+mXn3ne8w57dswd6ZNw+gffNm9lNayrwoKMDuWL2adfz445mrgQDzJxAQef119utyAdRdfDG+knIZjx1rMvm8XvzE557DV8vNJfBx8cWmGaYGOxoa+N2oUZEBI5eLIO3ixfiJPh+JI3PmABzOmtV1FqHXy5xvbzf2SldUMtFKSwtrw2ef8b5jB8dRnVlRge5W0LCgoO/nt9+PjaovbYY0EDnw09MNB3isnN1ahaaZlbFemwb96uvJIK2upmlRFDKEQ/VQ+jMTsVFE/mJZ1jcifB+xFt2GAP/BsqyvhXz3iPAQrrAs6/nDnzlE5HkRuVREjrYsa+3hz5eIyMkicodlWb+07WORiMy1LKvw8P9JIrJXRFoP/7798OcTRWSdiCT2R/bP7t0404WFOIRJSSjhJUv4f/p0UuWdTsp2iouJtCjn4KFDbJ+RgWL1elH0paUsHMnJKPC33yZKFQjQ5ODcc/mNAmZpaSwWygXV3My5eTxEx0pLjywJ1BR3p9N0j44kHg/KwePhWHl5/etker1HliWrI5yQcGS35IEQSR8somVDPW20Eg3/TyDAeC0pyZVLL71G7rvv1/8q+VTOOLdb5NVXH5NHH/2qPPjgOzJ69CnS3CzyjW8Q5fqi6yKXi/LkAwcIUGRlMfdbW8kqfPNNAhIXXyzy05/y+Z134jT/4hcYc2ecQQahZQHM/fznbPfVr6K3Wlt5jqmpAJJPPcVxL7yQbYqKTEft5GQTzNi1C931/vs8xylTRC64gDIa+5iwO9hKrxAqtbVkCaxcCTDn93Pc44/HWLcTgHcmHo/JalJQtCdd5FtacDaUy3DDBkPzkJyMYTx5sshRR5nGDP1lhPr9plTZ7sQqj81AC6wo+BKubLU7EggYpyhS52ftNFtTI3Liibly3HHXyNln/1qqq1knq6t50S3yMamt/aqUlr4jaWmnyLZtJuI+GPVRbW18dJFlAc5VVxOY8PvhGMvORoe89BJ2xmWXoa/a25m3RUWGUykxEaeorQ07w+Phu1AHzucje+fvf2cOHnssDnhpafhza24meLJzJ3Ng+HDm5qhR3R/3um5p51A972HDupc96PejNz0eA5x3la3YmXg86MmDB81Lm0M5naYEUl/2oPNgEtVjg7l7c1f2kd/PuJo8OVcuuugauffeX/8LQNVXICCyaNFj8sMfflXuvPMdqao6Rf7yl8GtiySOPtq6dQBVb71luilv2gRtytKl3N+77mKbqiroTqZNM5UC2dmmO7xlQRWzbx8A18kno1PcbtMQ6q9/ZT8TJpAwMnYsv9XGUGPHMsdra9FbL76IXzVyJHRXZ50VbIt4PFCeuFyRy5dbW7HZ3n+fa2ptRVeefDJ23bHHRqeP/H7TBdnh4NqzsmKfWx4P160VGBs2cO+U5qGsDLto8mTu1+jR/Wd/+HyGC7u93WQ7q/8YyWYYCKLUP0p7EYt0xo9oWYxzBQhDXzU1JDgpZYdmWQcCIu+8E1Um4hcRh4qr9Gcm4iEROc7hcJRZlrUnxn38JsxnF4nINn1wIiKWZVkOh+MnwsO7UECcVQIi8ruQfbwrIhc7HI4sy7JaRGSGiBSJyC/0wR3e72aHw/G6gDD3qTQ0ABBqJyinE8N32TKc5bIyAMaCAqJeSg6bkIDCPHTIdDTetYvFZd48DNGqKoyk9etZHA4dQtledpkxkrX5Sno6v2lvN8Z3XZ0hBQ/NcgkEMC6bm/ltSUlkQNCyOHZzM+ddUNAzhzsW0XJAO2io2Qp2TorBoPAHgyiXmPIjxnov1UHq6OB5qTOo4KDyfYqIpKbmyNKly+X99/dISUmZpKQwH3JyeFeemWOOwTCKMF6/cLrIskQefRS9MnUq92rvXozBCy+k4dK4cWTn3H8/c+Ohh5jTd97Jc779dkpcNm2C+3DVKgzpBQv4bUMDz+vdd2mE0NhIxt8ttxC1FzElwdoAZcUKwMNt2zjHmTPJehw37shrUC5DNdrUkLQsgAbtqLx7N2OqtJR9HX88+4t2fHq9nGNHB7o4J6f7UWWv1zRe0Jfy9Shp+dFHk104ZYoJLPWn+HymTFmBw8RExkJnvGwDQZKTOWe3m/OM1cno6GAs7dmD4dvUJP8CB/VVV2cyEl2uHPngg+WyZ88eGTGiTAoKeKbz5hmH8be/FfnZzwDfI8ig0UcFBV1tEZ3s3cs8ePddxts3v4mD3NiII19aSmBj1y7G34wZ2C5appySYtae7Gy2cblM9QDXRbbNX/+KA3PUUTjgFRVHno+W9W3cyPN3OrGjtPNzd0THoTbs0fFYVMQ1dCebWEFt7USelobu7o4uCgQYs9XVXFt1dfAYzslB/2jzk6KiwV0CHCqDtbzZnk1p18v2zzUgLiKSnZ0ja9Ysl5qaPVJWVvavMnl9127h556Lfoogg0YXxUv27UPfL1vG/CgvZ72+4AKCDwkJcDevW4dPMW8e2X+ffIK/MWoU/x84ABimQY+TTgKYq6tjXamthd911y5DF3P88eiH9nZ+63RiK23dCuXCBx+g1yZPpqT6zDOPXIebmgxHfVlZcNVXYyPXtXQpGYduN/pj+nRss9NOiz6QoU06NIiblWWamUQrqmft3ZJ37jRjOTOTa5gxAzt12rT+qb6wiz0RRbnWk5MJXGVmxp/3sbckIYFzbW/vftWGz4d9X1/PeK6pMZz0dqBQ9ZGCg8pVrr0QRo0iIKXZ7MXFRi9FIUM4VA+lP5e+u0TkKRHZ5XA41orImyLyrGVZq7qxj+1hPisXkbfCfL7h8PuYkM9rLMsKbZlwuPeS5IlIy+F9ilArHyrhPutV6eggG8frBdhwOnHMV6/m/+RkIvETJtDQoLGRRSEjg4WptZXFyu1m8pWUsMjk5rL4rVjB9x0dfH755RiEakgrCa1Graurmdza9bOggH2GZvTYy5fz81n0IommJ+siYC9P7C3RDqt2wFAjQyIoyKwsDOOBwEnxeZVQINEulsX4sWcLhntXwMLO/6O8UKmpGBGaefHwwz+Tb3zjOrnhhnKZOnWqnHXWWXLVVVfJ0UfPFBEzTjMzO10kv3C66JVXKBEuL0e3HDyIUXv22RjKlZUYb488gh75zncot1m0iEDGN7/Jvf3d7+gsn5xM1+WrruJ51dez/cKF7HvmTEqeJ08256CcMW43JdTvvot+Gj4cx2bmTOZr6HNT49XnM+XLIoB0H38MeFhdzWfjxpFJOXWqKS2Kdt5rx7+2tuAIe1cOu4KYGzcawHDbNmNQ5eWh3086ifs/bhw6sje6AXdX7B2V7Z3mNaNvMOl
Me1lzKG9tIMDaqpmCdlAwXDaWvfFKTo4xeisqWDPz83mtW/czWbDgOtm2rVxSUqbKMcegj2bPnilOJw2KRDCUO2n684XSR42NgIivvcb7178OgFhTQwZiQQEO/LZtzI9p03i3dzAWMbZMRgbjV9d5y8JhXrgQcKC8nK6qRx995Fz2eDjOpk3opvR0Q8bfHefQ5zPAob3k37IMf2p3OibbO8hbFmNbOzd3Jcqhra+DBw1/qQbaKip4Ly7u+2BvX4vaE2qnaNOV/gwgqw1uBwND31Uf238T2vHeDhL+9Kc/k5tuuk7mzQu2jWbOxDbSdTUxcUgXqTQ1kYCxahWAVmkpAN7JJ2NXpKWJfO1r+FjKn+p0sr3TiZ2Rl4c/t2aNqa448UTsKd3/e+/hr+XkkOAxezbrB7y6poR5+3Y4p3fsQK+dcgqgZWXlkT5YIIAObGxkDo8cyZiorjYdlTds4LyLi7E/KioIcpWXR5+NppyEzc1G3w4bFh0Yf+iQAQu1W3L7YUhF6SHOOIPzGz3aBFpi7SQcL3G7TeWa0lykppoklMHaSFP1oAbjExL4W8FBBQMbGoL/D6WqsCxD7ZOfz7iaPp17o3ZzTg7PUik3lO+3B8HyIRyqh9JvIKJlWS84HI4PBET2DBG5SUTucjgcCyzLeiDK3UTZL7VT8Xfy3YDLKfP7MWTr6kyqeG0ti8r06UzMdeuIRp1zjgHtCgp41daSpePzYUhrk5OcHKL1b77Jd5MmkUE0bJiJjoigwBsbUcbqECkvh5b9FBcfuRgcOoRBn5BA5CCSkamOWWsr+ygqig9XWDhxu4OzDFtbg7NlMjNZRDXLsL+zej7votkWLheLUGurKX20lxeHMjBoqWtKCs9J+eUUMFRuqEidXefPv0zOO+9EeeWVV+Ttt9+WP/3pT/Kzn/1MFixYIPfee2+0p/+F0kXr14v87/+iN/LzMQoOHsSoXLWKTLiEBMDB2bPhQ/zFLzD6zj6bMuXt2+n8t2ULHH3f/z66w7LIYvztbwEEKiv57rjjjmxmsH494OH69YyTqVMxkEtLTXOV0HlrL19OTyfLcNUqXg0N/K6yEv1ZWckYcjq7VyqoIGXb4T5tqkMiOexNTYAOChpu3mzOMTUVAOKyy9DLY8eis5VHJjWVV3+V42gWi5Yqa+dPzTjsb+c6FnG7TYffqirGYV0d40NBw9ra4KwdEZ5vQQHr1tixppumvtQYVg4hu1OfkMDrzDMvk+uvP1FefRV99OSTf5JHHx3SR5HE5cKRfP99HNzLLkMP7NsHP1dpKdkxO3dy3ydPZowqpUBKCv/v3s2+RowwDUnS05mLCxcyL4uLRf7rv+D5Ch3Thw6xzfbtjIvCQmyysrLogXOv16xzaoskJRkgU7uGqxMVjViWWVMVgExPj3xObjfj2w4aqh5LSOC6pk41zU9yc6M7j8+jaFBEdV9vBEmUMywSMKh/h2OmUjAwMdGsEXaQ0J55H279uOqqy+TUU4dso2jF6wUcXL+e97w8gg4zZ/JZTg4czh9/zJw+/XSzxgwbRmazCMHTrVvxPyZOZNviYnTYwoV8V1REZvXEiegDBRBbWtj/Bx9g07S1AfB94xvYM06nWYvs4najM7V82eWCL3HpUnSaCADddddxng4H61dxMXogmjXesrDrm5sZs0qF1RnP4vbtwVmGyhnrdHJdJ52EfVRWxn3SzD4FJvszq8/lMv6lVj6lpaFDB6NPGQiYzt/2V10dWENDQ3C1nl2ys03vg4oK8/fw4Xyn3JdutwluiKCn7M3BkpPjZ08O4VA9l35Nwrcsq0ZE/igif3Q4HGkislhE7j2c8hkrN8VOEZkU5vNK2/fdlV2H38MUrciEGPYXk1gWUfXt21kMsrJI4962jUj37t38f+GFOPG7d5vy5ZQUjOGPP2Y/p5/ORPX7Ucq//z2O0rhxZB7m56Poc3KY4IEASqK+3mRyZWTw0pLj9HSUo92I0shWSwvblpREdnbb2gAQ/X6TqhwvZaHcDXbQUJW6EqirYtey5CGJn4QrJw7NINTMBhUlMNeO48pXp6CJ/h0NSGFvtBJu28LCQrn55pvl5ptvlo6ODjnvvPPkgQcekG9961viiH0Qfi51UWOjyA9/iB4pL8dwqKmBT3XjRrJzamuJpF98MQbn3XfzPO+6C4P617+mYUp2Ns1Wzj+f57JmDVHzNWswCn/8YwAAu07x+eB9ffNNdNKwYWxz2mmMkaYmxkRe3pG6RjMXt27lXLV8KCmJ877ySnRpSgrbBgLG6Y7WSFb9YlnovNAuz263yVRS0FCzHh0OQ64+aRJ6fvRovtM5o8CTzof+AOgU0NCXAof2jJaBCBzq8+ksc/DgQcZ46O+Skw1wMmuWKZ3RjMKiItZNfda6vtpfIjg5bW0ms1k7E9rvV3HxkD6KRgIBY9csW8a8Oe00w0VWVkbwYf9+nlNFhQG6NYPB60V/dXTwbPWz+nqypD/+GDvo5pvZt30uWxZ206ZNAAEJCczfykr0TzSia6M6TyIGwFSAs62N8ZOU1DkAGCoKHgYCZp/281f7zw4YNjSY7/Py0PE67gsKBh53aX9LQgJzV4HE7pQ3q47oLHvQH8a9dDgMEBgJHNTzikZUP4V7tkO2UXSi2cqbNxPQSE9Hr1RWAv5pNvTatawbc+Zgh7S1oafGjKEsedky5uCMGfy2uZl9P/kkdk9aGryK8+YZPjrNKPzgA7rPr13LdvPmwdU6fjxzW4SgSqh/c+gQttTu3ejOjz823ZiPOorMyblzDSja3IxtM3p09L5SW5vhcE1JYa20B0ICAXSpHTDcvdvoxMJCAFNtfjJuHPqwtZX9KoexZqv1B8VAaDWbcv2lp5tknIFKfeB2BwODoZmDDQ28QrOZExIYfzk5JAgVFhKIGz6ca9Z3DcDbqTl07dOgntPJ/UlKYnylpPT+ejOEQ/VM+mU4OxyOBBHJtCzrXwmtlmV1OByOzUJ762yBPFJEpLtxzldE5FsOh+NSy7JePHw8h4h88/D3L8dwyh+LSI2IfMXhcPwqhNDy7Bj2F5O8/z6O+ahRLEi7d6PQjz6aCHx7O1Gi3FycpMxMFozGRro419aiXGfPRhFs3cpisXMnC8H117PoNDWxqGh04NAhFhSXC2VQVMT2lsUi2d5uMhbtNkW05cvKjdDRgaIpLOxZSZ4a3XbAsL3dfK/RL22A0p3yxCEJFjvJe7iyYv07dOERMZElXWAVFLQDhcqD0VNQwuk0ZODBTpRfWltbZdiwYf/6LC0tTSZOnChLliyR5uZmyTychtsYiix0LZ87XfT/2XvzMMfu8s73PZKqVPu+9lLtbvdqt902eMNgQsyaxENMSCB+gBDgZpksd3KTS8JkkmBIGCYZGJKZLOQJITdhDyQmEMISwBhvEBuMcbvd7s12r9Vd+6bSfu4f3/7699OpoyOppFLpVL2f59Gj0lrS0TnveX/fd8tmISBykAodiwMHYIsOHsSC+swZOJ7j4xACd+2CgHj6NPq4nT6NRt6/+ZuIth8/LvKXfwkb19WF5951V6HDNTMjcu
+9EAdmZ+GovP3tcG5bWmC35uZWTooXgT14+GHYu6NH8T1aW5HNfcMNsKFcsCcSsBmRSPnRYtoclue0tuJ7xGL4rk89hcXFU0+Zfj0iZijWa1+L6717CzOvMxkz3VfE9DBbjwg2hUOWKouYjC0Kh+sJBRE/UZCi4aVLKyfmipjz2sgIMvQpCrKfG89JLH/3awDORT+zyJgZxAU/HWJmS7Cfmn3uUXtUGSdPovLiK1/B8fMzP4PF5733YlF+7bVYAO3YAV+ICxUufBMJk/U+MABf4PRpTIb/znfwvLvuQlayveBNpWCznn4ar21vR9bh3r2lMwTZkoOLKGZdMNuCvUKZQZhM4nZ7e/l+USqF78ZzZ2cn9rXZ2ZVlybRF7FF91VWmLHmtqkA2Gn7lzfQ5gsRBP7+IC+lo1Ii+tkBY7dC5Yp/fDrSKqC2qFNqDr30Nt2dnYYPGx3E8vexlsFe7d0MAO3wY2/raa3Fc33sv1lmRCKo1tm6Fr/Cd78BvWV5GSfTP/izs0uQk9on2dmRcf/azsH1dXSJvfSsCuEND8EnOnYMd2Lq18DzNQZz33QfhkSWphw4hmeRFL8K5kWu9I5eLMcfGyu9ly6F36TS+5+Ag7MrUFL4XBcMTJ8y5ub0dtvSmm3C9Zw/WbPbnnp83/lY8ju9aq0nOlUDfz65mY2IKL+sZeGHgtNhwEl7YdsWmrc1kC27fXpg9yIud7MN2GTxX5fP43bkP2Bn2DDozGE4/0h60spbbTXWo2rBemniniJxzHOceEXlcRKZF5HoR+b9E5Juu6150HOdRgQr8Xx3H6RGkjH7Xdd1SCu4fi8gbReRTjuNwtPZPiMiPichfuK57uNIP67puxnGcd4nIR0XkQcdx/kFEOgRjun8oIi+o9D0r5fBhnJy6u+EQnzgBB3DfPkS/4nFEyjnNiFl1R4/CQMRiuG9kBCeff/kXnFiYxXPLLTjRzc/j5BeL4b3Gx3E7EoEQwNKVbBb/P5WC8+1NjbfLl8fGikerFhZM1kdv7+oa3iaTK8uS7V5cnZ04cVE0bNRIUKPBHpfFMgft4SQ2nPRoDyfxCoSVZFDRMS9Wjlwufo7ywsKCbN26VV73utfJoUOHpK+vTx577DH5yEc+IrfffrsMDw/LDTfcII7jyPvf/36ZnZ2V1tZWueuuu3ZuRlv00Y+ir8/OnTjOZmdhj6anYYsefRT7xq/+KkqST5zAIJI778QQls9/HqLhn/wJsrlSKZE/+AOIAO3tcH5f/3pTHsOJq9/4BhzOVAr/5+1vh/jHkpqpKTzW0WFsCHu/PvwwbGQ2Cxtz22147VVXFdqCdNosvNkvrJz9jYNdslk4kxcu4Hs//TQuDGC0tUHoeOMbcb1/v3+2EqOyyaRxpjj8oN7OKAcIpNOFfdno9NXLliYSwZmD4+PYB7yLcrbEGB5GGevttxeWF4+M4NxQrjhDJ5mCsz2plESjZto0swy9tLWZoT52lutq7NHb3/72n5VN6BuNj8O/+ed/xm/5jndgkfvQQ/BV9u3DNt63D+chlgJT7GFGIqskolH0m7zvPuwP/+k/Qdy3+3xNT+N/njqF331kBLZkbCzYVvCYtjMvHKd4iRZtCRfI5doi2jAOzpifN5mG4+Om1I9DMV7wApNluN7DBsKCdwiJ99r+jb3lzRQDm5v9swer9XFWC6c0s4c0ymLVFpXLpUsQ4f71X01PbiZw7NgBGzE+jiqH1laIZhw+9MwzeK0IxEW2UPrHf0RmYSwGX+X22/F8xzHBsocewrpwdhbru1/8RSMyipgWHAwQRKPYPx97DHbu3ntNVuGtt8I3uvnmQpuXSCBhhckiY2PlnS9TKXwuVhlNTppMw6efNuu+aBT2+uUvh2C4dy+2nd9xkEhAjEokzGDL7u76Bzvy+ULhkGuKeiemMAHHHlDizSScnl5Z5eU42P/6+3EeOHiwMHOQST+VbFe2RFhexm/LwXSEWdM83xUrS6Z4GJQdXSNUh6oB6yWnJETkzwU16D8hIi0iclpE/odg44vruicdx/lVgXL7NyISFZG3SYk0UNd1pxzHuVVE/ruI/JxATT4laKD5odV+YNd1/+5y6v67Ln9Ovud1InLVat+3HM6cEfnc53AQHjgAJ3ZmBiebxx+Hwb3zThy89vTlo0dxYHPBPzMj8s1vIrLV0QEnedcunGCGh3EyOXMGr+npMQvHnh4YF/YxZM8cNte1+xuWW77McqFUCie8vr7yFqTZ7MqyZBpIGvHRUdMQXSPpK7GHkwSVGPuV0TQ1GYHQHk7iLS+uJRy0kstVJ1rYjjJP+m1tbfJrv/Zr8vWvf12+9KUvSTKZlLGxMXnXu94lv/M7vyMiIldeeaX8xV/8hXzgAx+QX/iFX5AcNsyPyCazRffdJ/LpT0OIc13Yk6Eh7Cs7dsCh7elBBPzv/g6veec74fS9/vWwSe94B7KF5uYgSH7+8/gd3vIWlDR3dcEWZDIi3/0u+reeOQNb8uIXIzK9Y4dxcnM5M4Cppwf77te/DqHz6FHs511dyAJ4yUvghHudOzqEFBnsKc1BzMzAGaeocPq0KQWMRk2Db5Ylb9sWvEDM5wsHH7Ava71Llhk9pngvgu/D/qK1dOrs4SR+wiD7ErI/pE1npxED9+wpFAZZYlyLgVx2OXI2i3PP4iLOWxQKuV3K3TYUhjlBnufQ1dgjEfmUbDLfiP1QP/lJ7Je//MvI8n30URx3u3bhGDp4EI/ncjiOIhFT3tvUBDuUTCJA8aUvYZ9/5SvRa4wCfz6PY/voUeyLPLb37w/uB8jjiOIhB3zxXOmXXc9M6HTaZEKXc85LJrHQP3sWAsP0tOljKIJF4e7dRjAcGNDqCz84nKRY30G/4SQiZlp2LGZaWHABTL+o0cvAHccE5VbrG8kmtEWJBOzHF76A4851cXxxMOXBgzhf3HQTnjs+Dh+muxtZgNPTyBDcswev+eEPMbRucRE+z223YX8aHsbv8/WvI+j65JPYp174Qlyuuw6tB5jFzHUYh4o8+CBEyf/4D6z1IhG85jWvgW/kN3yO2cqxWGESSRDJJL7DU0+hPPvcObwHM/O3bEGmIwXDnTuD1wz5PL7H3BzsczRqEk7qmRCSy5l159KSEc3Y57jcljflkkiUzh6cnV35uuZmkyW4f79/9mBvb/X2iFUX3nNcPm8C5/39JsO+kvMN92Fm6a/RuduPtIkAACAASURBVEp1qBrguH7deJWycRznCyKy33XdcmrSK97YMzPoyzMzgwgRe2h0d+MkceAAUtyZhdPTg5T5+Xk4wXv2YIH75S/DoOdyyDq8807Ta2NwEAbp7Fkc7JxAmkjAaPf2Fpb/0JEeGSmMSCWTKHPMZnESLZZlMzeH/+04eA4no3rxRnsWFkwkXQRGmz0MOzrwPo3Yh6ue2MNJigmExYaT+JUTe/9er4UHnftKFurFqNHJqeH2tLW0Rc89h8nIi4tweGdnYWva23GsP/kkFqljY3CMd++GMPjXfw3x8eqrRe6+G6/5+7+Hk5zJwA694x0m8ui6mDr47W/j2N+2DRH4AwfwG
KO8InBa2I/x1ClkG7IBOKOrBw+axuR+sGeYiMn28yOXg/08ehRZ4U88AWFBxAyLYnbh/v34/uVmt3GyHbN610qMDyKXM8IhgwcUx5hVVynptBEBg0RCv+EkAwOFGYPe7EG21Kg13j6GzMwRKSyvSadNhlg1ZLOmhKyK92o4WyRSkT2qyBZlMmjr8jd/A7/lv/wXCGePP46F7q5d2Jb795tM0HjcDP/h7fl5BDG+9S3YmhtuEHnTm3Asi8A2MGsmkYAN2b8fPlWxY9tu78HjmUJSqSmSzCLk5OSWFn9/hgGc8+exQH/2WbNIb2qCv7ZlC46TLVtwrKz3xPb1ptRwEl4XG07iV1Js31fMj6A9jUQat0+sF9v+r4KG/IZrZYtyOQRPOXiJCRjNzQg07NuHv9n6JRaDcHb2LES2eBx2Z2AAlRvf/rZpv/CmN2FtxgnvDzyA7MTz53H/a1+LtVwqBSFrxw7sX7kc7MLEBGzD44/DXmYyWDdefTXs2AtfCNHRT4hbWIDPx0qzbdv89weWOR87hu/zxBPwwTIZ05eaYiHLkjs7y9u2mQzWigsLJiO7u7u+JctMWrFbYrG6raNjdVPo83l8Lztr0O/i13als9MIgVxj25mD7LtY6+3D85otGNplyXYrjnjcrN1Z0bNaaJNXmaHdkLZoLahw7Vfb/60iYnk4jhMXkbRrbTDHca4WkR+IyN+6rvvLZbxNRRs7mYSj/MwzMPinTuEAbm6GUbv5Zpyc8nkYkaUlLGxjMZzAursRHfvKV3Bgv+AF6HGxdy8M2MICjFIigRNcZydeF4uZ7J7eXrOwnpvD/fE4HFT7pDIzg5NWLIZIt5/hYOPWTAZCgDcaYjekZbYHt3Zzc6FgWG620EbCbziJ99qbti5iSriKCYS1nni1VjALoBY9gSgOFBu0UgbrtrXqbYuWliAgHjkCp3N2Fsdgfz/2nTNn0Ndnfh725447sD/9n/+D7fzrv47sw89+FvZsaQk9f/7zf4boODMDYe5734MT6jjoVfiKV8DpZC+Vri44bSxxvv9+OMhTU/gdd+6EQ37NNaY0r7PTfwGdy+FzZLOmiTP3KTrGR4+ay7Fjpry4tRU29Jpr8L3374etrQSWNy4vm0UmRal6CfX2RGVm13ASXtCUUZaABmUOXrxYOKCBtLQUFwWZPViv4Q22sMCL7Q5RPPArS2YT91LCUDnQrtMBXwXrarlrYI/KtkWui4DFRz8Kce8d78Ax9NRTsE1XXolF/JVXFvaWY5YES/DvvReDCubmkI1z551YXDc1wZ6wd2k+DyFu//7imcR2o3ief3nOjcdLZ8swc4NZNiytJmyRwMv4uGklwkmrY2Om72OxgMlGpRbDSYKuq/WLWFrOHmBhyABlX7dVfNZNY4tE4H986lPI7kuljMi1axd8l+5u2KXFRax3+vrMMLcrr4TtOXJE5OMfxzlz504khdx4I7b/4cMQD++/H/7VlVeKvOENKP2dnsZrONDCcSAwfulLqMR49lncNzSEjMYbb8Rn4+R4TnO2yWYhcHKdt2NHoejHoXTsY3j8OPxBtl0ZG4MdpV80NFT58eMtWe7owHZc5bmxYtJpswZlgNlefwZVt6XThROLWWZs3y42nISCIMVACoP2ffUIBrFSzR74ZQd6m5oKBcNiwZGlJbxHtS3E+L9X8R4NvqKtnBqt/Wr7mVRELA/HcW4R1KJ/WkQuCCbk/NLlh693XfdUGW9T9sbO5UQ+9jGccA4exOKcDqrrIjtnbMyU3505A6M7PAzD/93volfGuXNY7N5xBw74wUGIfZOTOKnFYiazccsWvPfUFP4/h13wPvbOsE8MuRyc2sVFGHuvuChixsJzkhhLlykY0mDbpXMUCika1usEsh7YPdBWO5zETxjk9UbqAclJsLWI6lcpJK6niFg3W+S6Iu99L4IR27fjWG1thQ3I5eDsHTqEHoDRKBbj//RPKGe59VaR//bf4Gh/+MOwRS98IYamXHUV9ulvfAPlOZOTcIZ/5EdEfvRHTTPvmRlTknzuHMoVH3oIznIsBtt244143/5+2LJk0kSL/RZBLCFlT5102kTSKRqyX09TExYEV1wBe7tnD7IMOztXt//lcuaYZoS10h6h1WBPVLb7xjY1mUbYk5PBw0kuXvSPkvf2+k8spkg4PLz67VYLvIKhbU9ZjmhfSmGLytWKngwArTIDdb0X7tXao7L9omefxfTRBx5AO5a2Nixkr7gCx+XQkOlPyP05mzUi/WOPYdF/4gTs2VvfioBALIb9+uhREwzdvbt4gIClXPYCqxLhkDCQIGJKXi9eLBQN5+fxuOMUlqMx07DWZXSNRDniYKnhJMWu6xmE5sKcNr/RfbIqqjU2jS167jmRz3wGQ00WF+GjMICxfz9EoIEB7KdbtuC8+swzOAfedBOO63vugVDY24sWCtu343zy1FN47yNHcHzfcAPaLNx0E+zEhQuwUxwg99BDyKhmEHb3bpGXvhRlyldeaQYqRaMIhvhVf83MYJ2Zy+FcPTCA72hPSz5/ntsZFSljY/huO3eaHs+rOa7yeTMohW1lurpwqcdxmkqZtSiH2LW0mDUop0D79Ry0b/u1XWlt9S8ptjMJe3rWz4YzkMwMQ9opEZNF7x36VQ4MNrsufsfVfj970EqFdnPDnRVrtPar7WdSEbE8HMfZJiL/W0RuEZEBwdSe+0Xkv1XQJLPshfsXvoD+hTt24GTBqUYdHZhqOjgI45RKwelsacHJ4uRJpMXPzuIEc8styECcmoIhXF6GoR4ehgGbmzOlySJGQGQvg3we77+8bHojkuVlnJiyWXwev34ZiQQMLcuXIxGz0Cft7cZYd3aW30Q8DHCxESQQBg0nKSUQbpTtVC48oYjUptyzCiFxPUXEutmiz3xG5H/9LzgBqZTJQuYAkiuugKO5ezdsw2c+Axv127+N67/6KzjOLG9+xStgm775TdPUe+dO9EO86Sbzm1LMeuopOK8//CFel8vhvW68EU4yF/i5HBwWijp+TnI2C3t38iSyup99FtlMZ86Y57As+cABiAsDA8apZGBjNZkk7EFa75JlHi/pNLb1pUumZ9rUFP62JxpPTPgPJ/ETBXkfRcNGKpn0liXbmUjMQLIvq7GjrmumLNdCyFlexm/FiawVsN4L92rtUVm2aHIS2cz//M/IrNmyBbZl506I+9u3Y1+kIMxSqHgcouGnPgVb0tsLH+r223FMnjyJ7JvlZdg5tiPwHpuZjBEOuT8xK4OTt8uFmdDM2GVbhokJs4Dr7jY9DEdG8Nns79TWFo6sNj/saebFxEFvZjApJQ6u13CScghTefMqfaNNYYtmZmCLPvlJ/N3RAV9h1y4kfQwO4nhtbsZjJ07Abhw4APvzhS9APGxrQ7D19ttx7N93H0qaT5+GP/X61+Nx9o1vbYWtevxxvOeRI/BjKFTedBMSRnbuxOfM5yH8zc3hc2zdulKISafhw3FY58IC3pPDo0TwmfftMyXJIyMmENnWBlu1Gl8mnTYly2zj0N1dn9ZUrHybncW5ZXYW93Gy/exsoUDo
XafZw0n8Mgd5WYu2K6uFbWvs0mT6e2xpZQuG1QY7cjn4nZzCvFrozwW1jvChga3r6qjR2q+2n0lFxLpS1sZ+4AFk83R3I/rBrJWhITi/PT04uCcnYQi2bsXzvvpVOKJjY1hkt7biudPTOPDa2/FeQ0MwbuPjMBy2gOi6Jm06m8VzMhkYSDutnVGYWAwnLzvFmwb4zBlTvtzVZRxuWzD0lu6EBUaWSwmEQcNJgkqM69kPLWzY2SW1iOivsgdQ2E9QJW3RY4+hFJkZ0BQQFxexj3Z04O/rr0c5z7PPwoF91atQKnj4MIIg73gHMgbPn0fE/IkncPwcOICynOuvNw5jOg3B8L77cJ3JwI4dOgTnde9eM3mepFL4HCx9oZjlughyPPUUnG6WKHL6ZG8vPsO+fUY07OjA/sUerK5rGuVXaqdct9AO2FPLa7n4Z8amnTF4/rwpfbx0ybSvYPN8wuzxYpmDtRpOspYElSUzcFWsLLlaOAwnEql+scChGhQlN5GzXNIWJRLoo/qRj0Dk27sX/gWzfnbuhN/CbZbP47e+cAF9xH7wAzz+2tfCjmQysFfPPQe7xuCBdyqoXdLFxRYXWZX2CF5YwOd59lmIBBMTuJ8ZjBQMeWHrBi5qWY69CpG5rhQbTmKLhn7Zg8w0KdZ3kNdhJ0zlzasQEje8LUqlYIv+6q9wPLM6a88elPIOD8NfaG3FMX/pEmzP7t3wax55BI+98pXwPaan0XLqW98ygzBf8xrYqqUlk8Dx7LP4vw89BGGmpQV+1cGDsIdjY7Ab3J+SSdjITMaUL5O5OQRQH30UtvH0aROcaGnBZ7V7Gfb34/dfWsJrs1k8j/0fK4Xvw4qQtSpZ5nASrlfPnTN+0cwM1qmJxMrgQ1NTcOYghcNGtkf2GpXXdlkyJySX06+3Gth3vK2tut+XWecVBInCbotCgYqI9aXkxj5yRORv/xZ/cxJkUxNOLC9/uck+XFiA4Y1GcWI6fRonrx/7MZxMDh82xj0SgdCYSOCE19cHIyqCBaPjGAGxvx//z57AbDexz+VghJeWsJDv61s5/ISXaBSvHR3FSdVe4DcyQcNJ7OtyhpN4BcJKMxYUfygWrOOglbCfoAJt0aVLIm9/OxbZdDYGB+EMMErc1obj+oEHsPj++Z8XefhhXIaGRH7xF1Ge/NWvYiL83Byef9tt6JnT14fL8jIcWTq0nHx7442Iwu/fj/tyOTittEWuW1i+nM+jtNHuZTg7i/ubm00fQ2YaDg4WOiN8v/l5vKa1FY5tpUJ1Lmci2ixfa21dXd/RTKawz6C37yBvs8zftkkcTkJRgtmDtki4msbg602ty5KrJZs1+2C1iyD2x+PxtUmc5UBblMsha/lDH8Lxf+212Pf37UNbBLu9AAMEs7PIWHzwQdirO+9Em4THHjO9pSMRLPoPHjRBCXvhxePXbhxfbvZ/Om3EfF4vLJhsli1bEGAZHcXffuVsFA85SbqtbX3LYGs9nMR73chiWq0JU3lzhUHWDW2LXBe26IMfhK/R0oJjeN8+2BKudURgoxwH2YnHjqGvYTSKaozbb8f7fPnLCG5GIrj/ZS8zGdVzc6ZP9Pe+h3WX66J1y6tehZ74ySR8o56eQn9mehp2JxbD/ePjEA2PH8f1hQumH/MVV6BajQHV7dtXHovLy7CpmQxsYU9PcG9AP1gtQhEyFjOTo1cToLUzBYtdmN1P+8Sg8MBA4aR6byZhPYe31ApmytvCIWGvazvTsJ7fj+3Kqp1lUOGglZD9guFERcT6Erixz50T+fM/N5mDc3M46A4cgAOczWKRS6f2Bz/AyamnB4MKrr0WByv7YgwMwPncutVMBhscxMktny8UEEXw/FgM78G+QCMjJkKxuIiU9/l5E4VnqZ/dgygWgyHetq3xnKNMxr/fYKnhJNFo8bLiMA0n2UjUctDKKoTEsP/SRW1ROo0MxG99ywzZ6O01Ylw8Dkf59GnYqDvuwDFz7724/21vg1B4//3od8gynle9Cs4qe99wovLhw3DwWEp47bUoy2ltNROYRQobS1N4fPppCJ0nTxb269mxA847JyQeOBAs8FA8zOVwPLMUqRLSaXwuO3OztdXfBrou7GmxvoMUCmmbbVpaIAYODcGe0/kdHIQwuHUrhImN0Ec2qCzZL8Nwvewvh2q0tNSmBIjZEWWKvBvWFolA+PujP8Lx9YIXYPF48CAW7Xv3Fi6IFhcxWOCb38T2+/EfR/D1zBlkAHG6/O7dsAnt7SbbjwsvCocUDUud1/N5+Et2H0P7uLVL3kZHYZtKNedPJLAfxGLYB9a6MqHRh5NsVMJQ3lyhb9SA36AiAm3RE0/AFj36KH6v0VH4KxTfWlrMQI6+PtiqBx/E8XPbbbg89JDI5z6Hc/3QEITDO+7AcT47Cz/mgQcQjE2lIGjRXr3iFRD9slmsF1MpvEdPDz5fJgPB8fBhvP/EBPwjBtoonvX2IjHlRS+Cr1CMVAqfKZXCsdzTU3ngMZWCn8hBmQzOFguS0efjMBK/HoQzMyvtUTRqhtdwcjID3VyPbt9u+kiGGSa62IIhf+NIpDDo1dy8/okr7HkZiVTXF7vC/ogh/5XDgYqI9aXoxp6bE/nTPzUDCubmsCi8+WZM8FpcxPPyeSy+jxyBA3z77XCsaUiYvj42huf29hrjPTwMI5zJGAFxeto07I7FcMKYnjZlx4kEokdnzpjI1sAADLM9+IRRJpYJ1ntCoD2cJEgg9CujaW5eKRBu5OEkG4laDlpRZxl84AMif/3XptyK09U48Mh1YQ927IBz+53v4Hl33YXS5IcewgTVfB6L/TvugDg4OQlh8nvfw+s5OfCGG2DD2FO1txfvxyl9kQj+Zpbhk0/iby6yh4bw/swwHBszWXltbcELdvaIZYS90pIav5LlpiaIkhMT/pmD/JsDFWx6egozBb2ZgwMDcMK5+BTB78LBKOvtLFZDqbLkWvQxXEuY2VGLXnXZLN6PrS9K0GBbomKK2qLTp0XuvhulfIcOGSHxmmtQPkjxeHlZ5N//HVnP2Sz8ottuw4L89Gk83tkJX2rrVrMP2RkbHL7ChVcx5uYKBcOLF02ZWGtrYUnywADsYD5v/Ipi+20mAzuXzRoBuRaVG37Zg2EcTrJRCUN5M32jMgI1G9YWnT8PW/Tv/45tMDqKrMC9exGw5MC2pibYlMcegx9w440QGu+/H8GNZBLBzTvugH/kuhAnv/1t+DfsPX3rrUge6e83bav6+/H4uXP4PeJx9Eg8fhzC4eHD+AzxOHwJtoDZtw+fd34e/7+vD4JasXVNJmN6BEajlfcpZFXH3Bz+H0uWo1GsE4OyB/2Gk7S0FGYKejMHe3pMv322fojFTB/rMA+fyudXTku2BVRvhmGjtsLKZKBFsJ/vaqGQyCByACH9xcOFioj1xXdjp1LIQHzkEdOPa9s2RJ1GRkyU4fRpZN7EYpi6deONJoIci8FYT09jEe66MKBsqj80hJNCOo3FaCQCgx2NwgizRxCzFJubzaKOC/nhYZws7RK/VMr0PWxrW5s+EfZwkmJCITMibbg
oKJVBGNaTi4J91HbAq6WCHkBh32t8bdG//RumJy8tmf5bdu8UBh2uvhr2IpeDM7xvH6LzExNw6F7yEjzHdZEt/eijsF35PCLgt9wC8XD7drwHsw17e+FEfv/76In4zDOIpCcSeLy5GTZo3z4IAlddBfslYoYVZLOm/K+YLWJ0PJ02ZTXl9LRLJmEjz5+HEHr+vMkYnJkxg0r8ouT2IBK756AtGPoJmGyGbQuHsZgRDhtx0VkKLkqLlSV7BcMwfEf2NHSc2gwH46KBx14AG9IWzc6K/OEfinz3u1gINzXBblx/PbJxHAf7zX33ifzrv+LYv+UWZDEzc6W5GfaGkz5jMRMwoDAd1BMqmSwUDMfHjS2KxQrbBYyOmkFPLEvPZIwgWGzBTsE4nTa9NcspFbSHkwRdFysvLtV/UP2i+kE/hpUVjRi0LtM3Cvte42uLFhdF3v9+DFJJpSDo3XIL7NKWLRDnYjHYoJMncXv/fjzv4Ych7sXjCG7cfDP8nO99D72kn34admZwEMLhwYMIlPT3w/9ZWjJZjj/8IdaJzz0Hv2N2Fp8vlzODXW66CUGWrVuNjTx3Dn5JczOCvyy59sLhc0tL+J2ZLFLKFuRypi/z6dPwi6am8F6JBLbfzIz/cJLu7tLTi/1EJ4pSCwvGJjPozQGdYcQrGNqVceyfS9EwbNVviYTJrq1mvVbmoJUQbZnwoiJifVmxsfN5kb/7O/TGWF7GyWTPHpT+sX/eM88g+9BxsPC++WYcgJxmxUmkx47BqLS24vFYDAft4CAMLYeqMJNoeRnPYfPfdBonjdFR07tgack4y3SQRUwzf/Y+XM0UqnKGk3ibwZJiw0lsgbBRIzJKban1oJXN6iwfO4ZswrNnCwf8dHSYY3FoyIhat96KBfrRo7i9Zw9KB3t6zGAUOrljY8gSvPVWOLpkYQEC44kTcHSPHYMDmsvBlu3Zg9ft2YPAyugo7J3XqWQWAHvJFRNdMhkTHY9GYe9Y0jg7W7zvIP+emyvsPeg4sJX2xGKvSDgyUvlwEgZ/uLAUMX1tGjVbJYhKypLDnN3EfpjRaG0WMWyvUeJ8tuFsUSqFyox77sEiva8PmYU334xjzHGwOP/iF+GH7N+PrKClJbyWQ5O2bYM9SyaNsMuAgbfCIJfDcW+LhjMz5vH+/kLBcHDQ/zhMpUyWMX0RP+zeqRSemalYajiJ9xgi3uEkfmLhepb9K8E0enkzfaMAG91gn7hiVtiibFbkwx9GhcbSEuzRi18MsS8ex/ZgtdbSEs73nZ0QCScncfsnfxL26VvfQqXG8ePYltu2QfC77TYER2ZnYQc6OlDSfOwYfI6zZyFO0m/ZscNMke/qMv2lmWVNZmch6mUy8EW2bPG3WZyiy4q3zk68L7O8S/Ue5PRiBi14/hseDp5eXGnSSTpteu4nk7jPHtYZthYu9PO87TRETBLMagd5NSrsOc79a7WU0R8x7LYoFKiIWF9WbOx77sEkU2biXHONyEtfCgfi3DkIiCLI6rn1VhMVam/HJRbDAXnsGE4UfLytDSe07m6cSCYm8J7Ly3CUmaIej8MYt7TgxDQ8bPokTk7CgHn7ay0v4/OyUSpTyW3y+eDMwdUMJ7Gvw7zQVGoPF1W1atBeRjPxsJ+gCo68+XmR178eji8HRNB5SSRMCUIkguj71q2wEU1NiHzv3Anx79FHTW/CQ4ew6L/qKtNTdHoajcSPHkVLhuPHTTn6yAhs0M6dyDK89lpjn9gLtrOzUExhn1iKjn6lpJkMBIFTp+Doc1Lx7CxsIUVDvyh5fz8+l+0IDw0hM4AN0BnEqZZMBhfbkWS2YSMuKIvhl2EYprLkamGDc2YKVAv74wVk1oZ9CxbYonxe5BOfQHVGSwvswStegcyf3l4EJz7/eYj6IyOwExQDx8YQcOjuxm8wMYGFcV8fFrW9vXhPBg3swSeXLhnBvr0dfs/ICK6Hh0svUNnLkkMD2ttX2iJmnC0u4sLMs+bmwrJjHU6yeWF5swjsfqP5urlcYMuXDWWLXBcDmn77t+HvjIxA8Lv6amyDdBpZgSxBjkaxFstmIRrecgv8k4cfht+TycDHuf56DJ1jH/pMBuXPZ87g8sQT8Hva2mCztmxB4OKqq9DHsLsbtubsWfwvinUkk4F4SFFyx46Vforrwh87fRrvMz2N91xeRvCEPQj92q50dMCmcgo1b2/dChtMIbUW5/Zk0giH9NFaW00rrTAM6xQxZcm2YMh1BmcK2IJhI2Yj1wK2PotGzUCz1cIEoyLbKuy2KBSoiFhfCjb2/fdjytfp03Bsb7wRJ56ZGUSdHAeRppe8BAtX9njw9ndgqnpXl1mATkyY5v2JhBEMXRcnpe3bYbAmJ/Few8P4DNksFtyJBN6Ppc8iJmWdE0/tpuRegTBoOIlfibEOJ1GqhYNWaiW4lBASw76XPm+L8nlMUv7c58xUYgp17CvIqPLoKOwQG3NHIijVWVjA/Xv3IkL+ohdhuz3xBEqTT52CnWP0uL0dzubevbB5+/aZ389e6C8uwqawTIW2yHXh3E5OGlFwenpl5uD4OB63m04zwutXUszMwaEhfA62UqDQySnLNfkBXCMcsrcny/J5aXRbGNTHUCScZcm1IJnEvtPaWpvp8ezzVKTfYoPvJSUp8Iu+8Q2R3/s9HHdXXYXhKLfeCqHv859HYLWzEwv5/n4jNF5xBWwQ7XY2i+BIVxeOIy6MmWXIFigMYNhZhpUubpJJI/ayCsSbOZjJmExJ1zW+Tzni4EYU25Xi2OXN7HnbKJToHR32vbTAFv3Hf4i89a0Q9oaGIPzt2oXvf/68aVmQzRrB7rrr4DscOYLXicDPOXAAa7z2drz22DHYsnPn4KOwlUFfHzIUb7kFgVhmLDOIKYLnX7qE/WLbtsKs98lJvO/MjAl8cFAJL5OTWCPSH+M+Fo2arMZiPQi7u/E6DqJrajJTlmt1fmcv/sVFs5Zsbzc9DhtdYOPxawuG9pqYwWG7lcZmsu+pFH7jclt3FKPEoJVNtEXXDxUR68vzG/vJJ0V+93cRnerthZM8Ooo+YyJYYL/0pWaSX0fHykh4JoOT0ZEjOJDYf4flT3Set27F/1hehuHq74dxnpwsnMC8tIT/v7yMk0JzsxEF2e8rlcKB7xUyOZwkqAdhoxt+JdzwhCJSG6d7szjLH/ygyHvfC0eH0VA6hx0dJhuvpwd2oaMDtoMZzNddh9KeaLSwjyF7pTY1wYHm4JOdO02PsL4+PMce8NTUBDtz+jREwPl5U2p86RJs3vnzsEfLyyt/m54eOPx9ffh7YACO9o4dJruIttEPOxDCzOhaiEEixrlMp03GEaPQFB8a2Zn0CoZ2H8ONVJZcCyj8tbZWv7hijz1WGXj2kQbeY8rieVt05IjIr/wKRL7rr0d29OioyNe+hoBENAph8Yor4NPs2mWGxPE4ikRgG44cgc1YWDA9Xh0HZcjMMBwZgX0op+eX2EekagAAIABJREFUX0kxs2Toe7W0FP7WDFpwmEkkgt+P/pweJ0oQ9n7TSEJDwKCVBvmEq+Z5W/TccyKvex
3sSF8fBMTRUfgqHDqSTJrkjtFRlB5PT2O7HDoEIXB0FH0Pjx+Hb3Tp0uV/5MKObduG7OlDh0wAbudO2Ktz52Bbhobge2UypuUL/RMOw7x4Ee8/OQkfxhtwammBf8VBI93deF9mDg4M4L5i56pk0vRLZECLU5ar3uiuEQ4XFky2a3u7yThsZDvJYDMFQ7uaJBpdOS15swRTg1hawnaqVhRm5YvPoJWw26JQoCJifXFFkDr+a7+GDJ3+fmTiiMAQjY2JvOxlWHAz8hKN4kBZWjJGllGa06fxGE8Ara245lRmioHT08bwX7wIJzsSwf9g2Q9LnrmQFzHTR1lKODRkek9QINwovRqU8EORplb9EQOExLCfoFwRLM7f+EbYCrtfFstotm7F8d7ebrI829pgpzo64EgfO4aIOzN7du6EWLhlC5zj66830UYOJpmdxd9nzhjBkb1ZL1zANR0DLlLYe5UlxeyRyMzB4WEIBNks3iufx2flQIUg2H6BE9wpCARNUy2XfL4w41DELAqbmxs3uBLUx3AzlCVXSz5f2KezWlguy0EdFmHf8q4Ijvm3vhWZzTfcgIFN584hGyifR7by/v2wPVdcYcr3lpZM6d34OPwYTlwfHIQN27nTlCXbAabVDifheYbBls5OEyi1swc5cdm2nY16vCuNCc8fzIZvFDGlSO/oDWGLFheRAf2d78B/uPlmrK0YuMxkcBz39JgegLEY7NPICNZEp0+jAoMtDrq6EPTYvRs27Lrr8A/Zm/nkSfhFHR1I2jh+HNf5PPyZixexdkynV/bIpUjV0wPbODa2sgeh6xq72NyM55bKAmM1yNwc/LtIBLauu7v6QL29pmVrh0jEZBv6tYNoBPL5QsEwlTLBVAaybMFQ7b0/3B9FgoP65cCAtqfaJey2KBSoiFhf3NlZOMoPP4xF+t69MDZjYyKvfCUW3CylsUVDRn9ETCPZyUkcPD09OHiYMs3SHYqP/Lu93QwVaG01/TgWF/He/f1wuJlizJRjx8H/qLZ/gaLUAzalr1WWR5Goe9hPUO7Jk3COmQVIYYuDQhiA6Ogwk/pyOdgdimHd3RAMx8Yg6u3bh8eOHzeTTLm4v3ABEXT2Q8nnjUDL1gkspRkdRcsFioR0eDmAwG9oxdKSKbFpaSnP0eVkVLu0sRYly+x/w1InEVMyxIzDRkLLktcGlqLFYtWV7ZBMBudvDha7TOhtUSol8uY3i3z72yhTvvpqM7X0wAEEWnfvhi1g64LpaSzqaYvicdOLq7UV9oiB1WJi4WqGk3ARyZJkv0nc6bQpb47FIB42UkmqEi4atbzZp+VL6G1RLifyUz8l8qUv4djet88cz/m8qbpigsfICI7v5WUzJTgeh73asQOBjOFhPHdhAf5QOo2gyfQ0/JbTp/EYbRWHLXV04PVsMTMwgPcdHcXf8bjJhGabKu++kUxCnEynTelxqaAWW0HQn2puNlUo1Zz7czkzUZlrWvbG6+jAGrWRgpH28E9e24M+2T/cnpaslA8D/s3N1fcW9xm00kB70sZFRcQ6kkqJ+9M/DUe5rQ0C4t69aBp+6BCMM6dPUVlnBJuXaBQHyvnziNLn8yYTkNOak0lj8HM5LND7+/G+IjgBDQ3heRcu4HXDw3ieCD7D1BSuW1vx2kaJfipKObC/Xq2azftE3UN9glpaEnf3boh7hIMHOjpMIIEn5Xgc35tDRjjUaWkJ73H2LIIVXOhwW0UiJnuwp8cMT+nogL3ZsQNOcUuLsXvt7UYkZLSakf/29pW2aHkZwZFs1ji7pYYgcIIq+6nUomSZfc/shtksZWmkDBIRLUuuJyxvYu+jWrwfG/lfXrSE2hbl8+L+8i9jgEFfH2yQ68IuXHcdbEckAvvC8kER2JWhISyyBwawEE0kjE/T3g67Yi/sqhlOwpK7dNpkg3qDAcw8ZBCrrU0XlkrtYHkzM57WW3DxqdQItS0SEfftbxf5+7/H92FfZGbesxS4o8MEXF3XVEf09BgxZGoKlRZTU6aai/4Uq8KGhvC+PT1YC3Z0mB71V1+N2+fPm6oyTlfO5cwwqKYmBHF7egq/SDptKj44SLOjI/jLLy8XTmlubzeBmNWSzRrhkC0+YjEzUdkvCLNesI+hLRySaLRQMKRPrFQH2wK0t1d3rvTpj6i/Th1QEbGOvPzl4t5/P04eL3wheh5edRUeY+ZKLGYcWb/sBS7oLl40pccsLe7uxvswa2dpCf+rrQ0L/VwOJ622NkTDpqfx2i1bcPByYuH8vGmwW4syLEVZD+wSoFqc7D1CYqhPUFu2iHvhgrnd3AxnjjbHXiTncsY22TQ3G8d5aAiCIPsm7tiBsprBQbx2ft5kUFNs7O6G47C8DFvFbEf+n2TSTAZsa1spDKZSEA8ZYeekwGKsRckye9Gm00aIi8VMqXIjZOxpWfL6s7yM7V6r3prs2Xn5mA31L/ZbvyXuX/4ltsvQEIKce/eaBTkrIQYGCkv0bHvEnogcotTXZ/pp1WI4CTOR2OPS65dls2bIAvselgpkKMpqaLTyZo+QGGpb9Bu/Ie6f/Rn+ZhCUghFbFrDUtqnJJG7YvwHFx5YWPK+31wRHBgdhu3p6cPvsWTx/1y74RRwENTxspi/ncrCJvb14/7k5ZC6m06Zdg/3/OeQlkTA+VdCkZNeFwEdfiq/p7l59xUQmY6ro6MM1NxvhsBZZ+dWSy60UDO0BfPzdeb3ex9lGhgkEXV3V+cwUEi8HwkNti8KCioh1xHHEZeled3dhT0EuPO1yMTsjxG4auriIA8UWR6JRHIR8fjZrxEhGwey+ZOzlw/soFDBK1EhNnBVlNbiuKclc7YnJNo/2+91zT7hPUI5TOIWQQQsuuFtazFRmBiJYRsy/OXiJE51tsZERRfYB5PuybJjZ08xctBvHe0u3vMNG+H/YM7FU3xlbRON3rSbDjhniFJX5nrysp93kPuq9iBihxXtR1h576NNqhud4f0u2bMjnRf7hHzaGLWJJE8vamJHMBRz3Vx5j3gtLzWw7Vi0sg+YAJK8Qyce5+NTMXaUe2PagyOC3Nf//3s8iIvLFL24MW0S4FmIPwnjcBAh4YbYz13PNzSajjb+PiPEPWMJLkY996XM5854MdvJxuy8+M6EpZBKW3trDBYPWcRSjmW1HX2q12fKsxKDvJmJK72mT1xPaabvKj9AntK+V+sF9l+uASl9L+8O/83mRT3wi3LYoLDRYZ6aNDcvm0mlEzHkf7y813dhxCktlSCRS2FSUjrSIORG1tJhpgiIm64gLdrusb72NvaLUAjpPdk9D4tcov9hjXjZK3IUnbGY/c5FsC4UtLUY8tG83NZnBAvG4yaRm+a5IcQGRgQve5v+n0EJb5M2y4P+wbVUxQYa/OfukiPgLkuVAx6RY2e96CYe2oOQVmUT8BRdlfWCJDXvx2QJXMeHX/n394D6+EWAgIJ1GRs7MjLnfXrB7F+60Adyu1QYIbLzioPf8wQUpH1e/SakXtOe0/d4hZNVQzBfaTH4RgxdNTcamsFrDzyZRRGTPVAY5aRP427CSxR6QtbxsMpyjU
ZMkwv/HAAl7CNI/I1zDsTcsg7jF9gVWT9hBrdX2aaa/5hUOKbqulxhni4V+fht9X5+Jvso6YPtH1C9sYZDX5dqjjWKLwoBmItaRTEbcaBQ9Mi5eRInxxYvmMj5u7ltaWvn6jg5c+vuR3j44iPKfgQEziCAeR6ZjOo309LY2PG9qCo55Swue29yM/zEzAyPb3Y2LLjSVsONdiLORPrNIipk8r9hSImsr1EfK3Jy4XV0oe5mYMBPbJybQZ4eXiQnTS9WGU4+Hh1FO094OO7NrF+6j0Mgo+vy8ab9Ah5g9hmIxOKFLS6bHa1ub2d65HF6/tGSmxBdr8O1XsswFQSW2zXbMWTpGYZOCR71tZVBZsreP4XpnRCrAm/mQTpt9v6mpMJvVxs5stSsTigjXof6lEwlxW1vNBFJeLl1aeZviog1bufT1oTfYli24DA2Zye2V9HXO5fAbsVqjrc3YGtc1PZxYytjaqpkryvpRbnlzULDCu0C3KeYHFfGNQm2LZmbE7e3FMT49jXUThzh5/56fX7nN2D+agytplwYGUI48MAD/p70dtunSJbxuyxbYG/a5Hx1FyXMqhdLl+Xn4PDt2mCAsy5Dn5/Ea9i/0EwPzeTNlmcHerq7C9jHlQPvHUmUGdNvajF9W76Fx2ezK4Sd2hYhdktzcrKJhI2BnDNrXS0vYvzi0yItfNQL/tu/j0+v4lTYtKiLWl7I39uJiobh47pzIk0/i9uKiEQUJxZHWVjPhdMsWONXNzTgh7dwpsn8/HOr5eRyszc24rc2/lUbHG5HyOr9BjjBL0tinzs8RrpCwn6DKtkXLy0ZcnJiAPXrmGdigxUXYppkZs5CmaNLcbHoBDQ+baak9PXCS2TMxm4Xjx8g8yxnyeTiqdpPvYj1TOLWW2Y2M4ldSGsGSCjuqz4g1L/WC4rfftGTtY7j+2Jlodnl7sXIp4jgmg5dDN/xEwgoJ+69fti1KpUzQg5cTJzBMZX4edmhiYuXU5WgUC/jhYYiKw8OFl6EhXERM9YY9pd11TY9W163NICZFqRV29htbk6xWIKyy3cWmsUXZLOwNhcXpafhF589DrJudNb3euB3Z7qW/36zVbGFwcBB98jmF/vx5vHbrVjwmYsSWuTnTZ7e7238Nl8ngeQsLJkDLASvl/rb8f/TF+H06Okx/yHrZQe7ntmBIW+84ZkKy3SZMqS92ZYxXJAyqnqC9WVzEdXd3YYugVdijsNuiUKAiYn1Z1cZ2XZHjx+Ec9/WZjJhcDieIuTk403NzaMQ7Pg6Df+kSTkIsKWTZs+tiYb9lC05Ow8M4aXmvqx25rijlUkwQLMcJ5nVQxFyksNyjBqJL2E9Qqzb8MzNYTA8MmMnJXV2wOSdPGif60iXYovl5k33NBTp7A+XzJqORWUNDQ2YKYm8vMqy3bl0p4rkunMhk0kTE2QqiXKfW7gvE8h67N1A9nNAgwVBkpWCoWU9rSzFB0L7tZ48o7gZlEfK3W142vURr8Htuals0NWWycNrasF1nZkz24vi4yWq0bzM4QfJ5LIxHRsxleBg2qLsb9o5Zjro4VepBUMagn29ktwNh3+EaCoTlsGltETOpUynTQkoEvgUnNdNfOX/eTHo/exavZe9F9j6MxeALjY3hmoMuOUSOj/kNKUkk4IMlEvidaR/LHWjCzMWFBVMhEokUCodr7YewGsQWDOnDi5iqEAqG2sd/bfEKg8VEQi+2vQnKICSsPGpqKj1NvAS6N9QBFRHry6o29oULIs89Z3putLaarCpOCGttNen1g4OmVDkeh8EfH4cQyQX+4qJxrMfHcZ+X9nazuKdDzQtv9/frolYJphIn2KaCEpqyP0cmY0T4Kgn7CWpVtiiRQIS9q8v0WKVzOjuL262tZshBT495HZ2FqSmRZ5+F7Zmeht2anDQL/Kkp0zuIWXbxuBEY+/vNwp4tHbZtK39xbzcAp3DI8tJ6lLt4BUO/Pov2RakNdHZLCYR+BJUVr2agjuuaBR77XlXBprRFy8uwF44Dm1BJdo0Itv/4OHyr8+dhd+z2DhQd2cqAF06lt8ulmc3I24OD9c1cVsJHLcuL7Qv9nFLlzWvEprRFmYxZW7FqgaW0mYwpR47HsTZjwOPcOfxO/f3wRZ56Cr7R4iJ+t2QSPhIrQTh51h7C2dNjfCL2re7ogOi4fTsqPsoRD3O5QuHQdeFPUTi0W8ysBaxIsTMNeQywJ6VdmqzrztoRlDUYZI+CSop9yovLhln/HGK0SsJui0KBiohV4jhOk4hcKSJzruteKPH0ijf24qLI0aMwqJ2dRkDkglcE1wsLMLR9fXCGk0nTg2N+Hie3SASP+2UYJpOFJUIXL0K89PYk8isTovNsC4xe0bEKQ6A0KOVkDlbqCNuPrQUctlEDgabhTlBrbYuyWTizbCK+uAgHs60Ni+9UCsc5S4q7u015cDyO56ZScA5EjMMrggX9/LwR9ViGY/dovHDBZBFNTa1sJh+JwJmmsMhrRvEpPNIWsaH4Wi60gvoYally7WA2Z5BIWKy8uFTvwbVarORyOBY4Db0KGnKvqcAeVWyLcjks2pNJHN/FeigFkU7D7riuyWBmuTkH2FHsnZnx79HIv2nzbPr6iouMvF2p8KmEg7USCCspQeWwCwbI6kRD7s1raYtcF77JxEShuNfSAhuzuIg1F4fLDQzgt5mcxHO2bTO9D/n4tm14j3TaBGd5HqKwyN6MExP4/5cu4T72crUDqp2dps1VXx/8pL4+3M8hKNhO+Luz06w314J8vlAwTKXM+ZmBGntojWZ+rw7b3gRlEPpRqu/gWou4HDDU2blq/7ykLapwzaT4oCJilTiOc4WIPCMif++67s+XeHpFGzubhYA4NYVoE1PdmZEYi+FAXloyGYcTEzjAR0bw+PQ0Tljt7ThpVHPg53J4Pw5/8Q6GoeiYSKx8bU/PSqHRFhlHR5HdpA51Y1Aqc7AcJ9h7ey3FwUqgmMPjZ5U0wDcpZC1tkevC6c3lcFJfXISD2dmJBXY6DeeTTbs7O03/sPZ2PLa0BJvGgSuRCBziuTnYqKamlSU3LFleXjaiISezzs8Xiox230Yu8hcWVu6rnZ2FC3mv4Dg4CHtV6b6hZclrQ1DWIC9+9qjC4STrAsu12MtplTScLRKpyB5V7IRevIjFdV8fjtVKRJJ8Hj5KJoNzQFsb9ods1twfiZSfBeG6sEWlyqdnZ1e+tq3Nv0+jfbuvTzOSG4VaiYN+962FLbLLm+s0LXfT2aLpaQQ0mLHMYUwcwMTeqbkcBMLZWdgZinnMgo7H0R+xs9MEUVnB4dfHkH0Rl5dxf0cH1lD5vP8wGN6enMS1PVGZPWAHB43NscVG/t3TU7ktcl34hbZgyGCxCLaXnWGoZcnlEVRSXE55cZnDSdYN9kR3HBwTq/hM5YiIV0j5aybFB9X3G5gzZ2DwOzthWDMZMykwGsVBxpRfETitra04CXD4Cvtq1CKiFI3iJDM4KHLNNcWft7hYKDDaU6cvXsSAmMnJla+Lx1eKjN7rwUGNSlVLOX12/CiWpt5IAmE58NihoBWWz72e
LCzA/nR2wnltbobTSgExEjFiSEtL4fTlbBbOLh3d5mYzPT6VwvHMfj+EpdKcghqL4b2am83vRed23z7zOnuiMu3j3BwcaJYq2sLjqVOwk94stVhsZUYjHWze7u01iwO/suRYTMuSg7DLi4NEQj8oAjIQUIPhJOsCextTfA/L515P5udx4UTQSgREOxOaImEuB/tGO9beXtk0d8fBIr+7W2Tv3uD/zcFUfiLjI48Ur/bgUBg/kZG3q8xm3fRUKxAG9R9cL2gf2fd3HcqbNzQcOsc2OayuWFzEffYx2dODY951kWmYy6F8OZfD+mZ0FI9NTxcOl+jsNOJvPm/sH4MgfX0QD+3fta0N/4Mkk6ZUmSIeewymUrjfFh2PHcO1LfaJmM9ki4tesZGZYxQMWVovgvvjcePLcdChUkg1w0ns9ip+4mBY1jsM5C0uFuocSmOhmYj1peyNPTkp8vTTOODb23FNATEeh3HnhEdOJeVCfGYGj3V14cTViEYjkyksByomOtqNdEXwXQYGivdo5PVmHApTKnMwyAnmdVCJ8UbCdY2DtMpSn7BvkbJtUSoF57KlxUzm6+mBOJdO4zYn/zGjJx7H8xMJvCYeh23KZs1k+EgENor2TQTvl0ziWsRMQS0WOODvyLJpOxuAl1L7bi6H72dnM3qzGy9dMoIm/68ItsPAgMlitAfEUHDcjCWL9RhOslFwXZO1u8q+U2Hfu8q2RRxEkM9j0V3ueT6XMyXKzBQSwX2pVKEAsJ7HKrOISmU1Li2tfG1XV+ny6Ub1B9eSUgHTepQXrzfe8uYaDZfzIyRbpChl26JcDokeExMmk8/2eVhSzHLcuTk8Pjxs+tC3tyP7MB6HX7SwgPdmViGFQQZdWVnR0gIxz/advCwv4/kMAIuYwEtHR2m/13WNuFgss3Fy0kx+tgOpLS3GNxoYKAzAUnTcjJVn5ZQWlyovLmc4yUaCGb3l7LMeNugWaSxURKwvZW3sZFLk8GGcNLq7zTAV9kHkwr252TRnHxqCM5xI4P7+/qrKoxoC10X2kF82o/2331CYjo7gHo0jI9WXd9eTchxhP8opo9msMBuRGU0VEvYtV5YtyuexaKW447pGQOSixHFMxJ1CCHveMLMnEoGTvLRkyhNYouC6JuvQLlluafE/Prkg4kRlWzikw17Nfu1XlszSiqkpc6ETPTkJOzQx4V+y2NKyMovRezssA6rs7MEgkdCPcnoPblZ7xKxZLkYrJOxbrSxb5LoYRLCwgPN3OYKYbVsYiG1qMgsTLshrNCW7biwtBYuMFy+aAVU2HAoTlNUYpmqPWpUXh1UcrJQ6lDeHfcuVvSAeH0dAg8kc8bjJcuYgEGaap1KmcuH8eTxn61YIbIuL8I3yeTNFORbDvsspyyxZ7uyE+ObXZoHPX1gw/eSYhMLhKNVkobIsmRmG9L9SKXzGpSXzvxcXcR/9pJmZlcdiNIptEpTV2NcXngFV5ZQWl1NeXKvhJBsJHh9dXRXZrE2+1eqDioj1peTGzueR4n7unEn5bmszQwAoejgOjDj7ki0umgX+KvsHhJbl5ZVDYbyi48TEysUtS729PRpt0XFoaG2HwgRlDXqznryUyhzcTPvAaqFItIpSwrBv3bIM/9QUnMSmJnMSX1oyjbAZaXdd00ycj7W04NihUylinNlIxAyWSKVMyXJrq//xls8XliqLmCmF1TbeDpqW7Fc2G0Q6XSgqerMZWU7tLRPiUBhvf0ZvOfVaNToXMeJpmIaTbCSyWQhb7BFVAZvGFl26ZAbGlTrm2eMwl4ON4NR4Zn0yO3qj7pschOUdAuO9zaxv4jhYvJcqn+7oWNvPrwLh2sBzKc+5NRaMw751y7JF8/MizzwD+0KfJZnEYwxWxGLm2OrthZC2tAQfamzM9D3M5fAe7O3K9gpzcziGYzFT1uz1P/J5vCd9LAZh29tNxuFq7RtLnSkY2naCfp89/CTouMrnC0VFb1Yj//YbUNXZubI3o1d0XMvJ0ba9Cdtwko0Cjwm2SCqTsNuiUKAiYn0pubHPnoWIyJIbRs55colEzGKuqwuvSaexYO/vD08Eud6wZLHYUBiKjowk2nAoTLHBMCMj/qn51ZbQ2H+rE7x2ZLNGENtEUa6StogRcjo7HR1wmnmMMLDBrGjHgeMZjZrG4iy/aW835TnpNB6jGFisZDmfN2XKFN0iERPhX42tC5qWTNHLHnyyFsdaPo+MxVLl034li52d/iKjndnol6FVTu9BP3vkLSP2EwnVHtUOLtY4PK1Mwv4LlLRFiQRKB1taUMYcVGXB8nBmQre1mYBFPm8Cs9obDttqbi5YZCxW7cGhMEFCo99QmFqIg97b6htVhl3ezGBcjbZd2H+BkrYonRY5edJMV2bgVMQIeAzIxeNmwGU0KrJ9O+6jQBiP43wdj5uMPiaEtLaakmWbXK4w648BXAZog0qci8FsSXtiMo9D+ly2YLgWtpOZlEHl01NT8Em9NDcHi4wcwFXMFtVyOMlGLy9eL1hpybYBZaC/QB1QEbG+BG7suTmRxx/HgdLVZVLkaZCYBRKN4kTBdPXe3rWPCm8GXBcn5aAejSwT8hKPr5z4yn5Edn80lloGOcRK/WFkvgJnOuy/VKAtymTgJGezZlhKOg3nlQISnUkGOERM30SWH7S2wpbFYuWVLOdyplSZ72mXBlXivPqVJfN0x/Js+9Jox14iETx9emLClCzazm4sBsd5YADXvAwOmuu+PrPtg3oPNto22QxwEnkFWXJh/5UCbVE2K3L6NOzCtm3BfRAzGRw3zIR2HGNzGJjVQGvlJJPBQ2GKVXtEIqZnrF9Lh5ERPN7aqtmD6wHPtzUsbw77LxVoi/J5keeeQ0AjFoONYTCUlRS2f8NgBs/BS0tmwE1PjxlCNzdnWi50dkI8tAMl2awZjJJImCxStoXh8VMODM7agiGPW+4HtmDYaOXEmQyCsEFZjTMzhf302fKmqwu+T28vtn9vr7nNC1tbBGUQqj1aH3j8dHaWdR7XX6kOqIhYX4pu7ExG5Ac/QL+Mnh44u1zkMS3ebtTL7B722VBqQ1DWIC/pdGGZkO1U07HmxDYbliwW69HI+3QKVf1hZJ5OVBmE/QRV1Ba5Lvbf5WUzQY/lBBTBW1qM6ETxVQQneUbYu7vxHLtkuanJlDkTTqfNZIwzyyzHcqf3MXIcVJbszTJsdMrpPZjNwnGmoDg5Wdi3kfdxeISIue7rK8xq9OvXuJqsBqU6mJHhOGUvDsP+CwU6oefPYx/furV4H8R8HtuMmdBNTaZnVyxmKjqU1VFOVYVti4plV1+65F/t0d1dOquxu1tt0VpQ4/LmsP9Cgbbo0iWR48dxPmXvZ7ZGoH3h9ksm4b+MjJge0SxNbmkxU5Y57Kmrq7DnWyZjBqPwmOHzOjrKa21Cv9YWDO11SSxWKBiWKktuBIJKi/k3e1izH6N9mZ01f/M8K2Ku29qCezRu1qEwjYDrmkzUMn4D/YXqgIqI9cV3Y7uuyNGjmMbMHofs8RCLwejzJMVsnP7+te2PtREJ6jlYbRmN3/+amSnMavQ
rpfZLze/oKN6jkZcwDYUJCxUOWgn7Caqo4Z+dRWSc2YY8cdPBpMhtZ6wlk7BTTU1wkiMRU7LsOGZaM7erPVGZYp89UbnUvh3Ux9BbltyIQZZSvQdrOZzEdeFQlyqf9itZbG31zyCyhcewDIUJEyy/jUbLOs9vaFt07hyCpSMj/vsZ+xyKwL5QFOG2W8uexhuhRmHzAAAgAElEQVSBteo9yOd4WVwsXT49Pb3y/3qrPfxExzANhWkkKAIzi66K8uYNa4uWlkSOHEFgjuszVluwpYuIab3S14fnpVKwRV1d2K7z86YMua0N/hJ9qlTKZByyx2I8bjIOS9kyDjqxMw3tsmRbMGSVWyNRqu9gueXF5Q4nSaWK92fk37OzK/9nNBpcOs3sRg1c1Z5sFsdHc3NwVYKE3xaFAhUR64vvxr54UeSRR2Dwe3tNA13HgfGiwW9qwmM9PY1n/NeTUpmD9nO8lHKA1zratLzs35/Rvp6YWDnQIBZbmcnoFR2HhsI/obveUJQqoz9i2E9QvkfE8rIpY25pMRFdZkGzRJALNQ46icUgfjPzkL2W2L+E/RJZqszyEmZWBy1a/DIMG7UsmSXU1QwnCRIJ1xJmWAeVT5dTslhsKEyZfWyUyzCDhBkiAWxIW5RKYXhBUxMGEXgXZLkcMknY1sVxTBCogr5JG5q1EgjX2sbaQ2GKCY2XLvkPhenvN8Ii/SDv7RKLz01LDcqbN6QtymaR6PHcc7ArPHY6O3HNdi/RqOlz6LrYfhx0OT9v+rSyZLmpCWIhMw65PzOZpLOzuBDF7EZbMLTLkrlu5PV6i+tBfQcrHU7iFQzXklwOAdZiPRr5t99QGJZPr9dQmI1KMol1Rnt7oG+kW7UOqIhYX1Zs7ERC5KGH4BQNDOCgiMdNA3D2P2xthcHZbFH1cspo/CiVORgmo53LmYmv3v6MttjoVybEDA5baPRmNWpqfiEctFIiGh/2LbbiyMnlsDBbWIC9yecRFW9qwm2WCYqYrAUOUYlEjDjI5zc1FU5UpnBoZxz6Zc0V62Mosr5lyTqcxAyFCZo+XWwoTFdX6fJpLVkshOVnJQathH2LrThq8nmRZ5/FYuGKK1a2+OAiwvYJWP7NoMVGphbioPd22Hwj1zUDqoLERr9qj/b2YJFxM1d7VFneHJK9pygrjhzXhS168knc5vbw9leNx7FfURRiBjmnJrNKo7PTCIeLi6a82J6o7N3mbKNkC4Z2WTL7GNoJJ/U6joOyBssZTlIqgzAMsAVJkMg4Pe1vi+Lx8obCbEZbFMTCAo4ruwWAh5DsPeFGRcT6UrCx83lkIB49igOBU7ryeZN23tODE89GFHpKZQ6WGyUvdt9mgiWLpYbCTE+vfG1rq39Wo309MNCYZaFrAUt7RALLEcK+l604uiYnIQJxOMrSEmwQRUKKgq5rhp3EYiYTiCXLbNydzZrFPbMN/bKJ1rss2e6jEyQS+hFUVryZh5MkEiuzGL2Co1/JYnMzbE1Q+fTAwOYqE2IjfTZ89yHse9gKW8SBHdu2YRFFslnT+zCXM5nILS2VDRdoZMKaPdioJJP+QqM9FGZycmWGdTRaunx6aGjjBvZ5jFVY3hz2vczXL/re9wp7QnPoZSaDv5ubzTAUuyciS5a7uvA3S5Vpu2zh0PZvWJZMwdAuS2a2o51puFYCU1DfQV77EZQ1uJntUSaDNldB06enp1faIseBFmCLjH5/b1Rb5Ec+D1E2GsUx5MMm3MPqj4qI9aVgYz/9tMh3vmPKwXgyaGmBQejuhnEI24IpKGuwlBPM60p67Cjlk04bB9qbyWj/TRGNcB8tNhSG1xulT6fr4oQf0B8x7HtiwVG4sIABBiKmX2Fzs3GQuT9we9AmUUyMRrG97OdxMAq3n7cs2XaU1qosuZzhJMWi5KV6D2pkuDqyWTjNxQYw8LZfyWJvb3D59EYqWcznkXXHxasPG84WPfccFk1bt8rzbV2Wl7EwT6dNKwQ7yNHolKqoUIFw/cjljC3yExl526/ao6enuMgY9mqPVZQ3h/BbFlBwBC4vi3z3u5gOzyAoqzLYc5VJHrbAFokYYTCZhHho39/ZifNTJGIGy9mlyRTnGJy1BcNalCXb9iYog9CPUn0Hw2CLGxnXhThWqnw6kVj52vb24uXT/Jvl9RuBdBrJDgwietgg37KxURGxvjy/sScnRb76VfRaGBoyDcA5VKO/v6i6vq6UyhyspoxGaQzyeTMUxi+bkX8vLKx8bWfnSpFxdLTwdm9vOBwNDlopkgUX9j32+SM1nRY5c8b0F8tkYI/s5uHsERmPY1vwtuMY4ZDlzgyGrHVZcjm9B4PKiysZTqKsD3Soi5VNM9PRr0yorc2/fNq+HZaSRQ5aYV9SD2HfW58/SjMZkRMncCxeeaUJTiwt4fvncma6e1tb42THr0X2oP24sv4wk6ycoTBeWlpgb4LKpwcHG2d/tmEpbZnlzWHfW58/UvN5kcceE/nBD/Dd43H4RZz23tmJ84frrqyaYNCDVRvsb9jaunJash2wZ/9bVqKtJoEkKGswyB6V6juotqixSCYLBcZyh8LEYliDBWU19vaufw/NckkkcBx1dq74zLq31oGGFhEdx7lbRN5t3dXkum62yNPrhuM4u0XkuHXX+1zX/b0yXuqKYIf/t38TOX4c2V2MTG3ZgktfX/2diXKi45U4wt77lY1HIlE8k5F/T06uLHloajIOtF+PxtFRPN4IGbjsjxiLibz3vXfLe97zHvvhMNsjVwTf7exZ/F4sVW5vN0IFMw+Z7eMV/JiJyPtqUZZMh7eUSLhyO6z/cBJlfUilzOCXYuXTfiWLkUjxsmn7uhHKhFjWFo+LvO99G88WuS4GqSQSIrt2wQYtLyNYlUqZbANvL7K1RsuLlUrIZAqHwhQTHe2ediLYHzigKiirsUg2cl2+l1958913bzxbJIL12X33QYhpbzd+UU8P1mjMCqRoGI2a80tTE9Z1LS243840JBwqZ2calrIJqx1OUqrvoPpFG5NcDvtvqfJpb7WHCDJsSw2FaYQWImzl5borMr4DP9kG1JfWhZBozfKWy9fPLwEcx3m5iLxTRA6KyICITInIMRG5z3Xdu63n3S2FO4qXX3Bd9yOXn/v/ichbrceyInJeRP5NRN7tuu6ly/ePX/5MAyLyoUq+iOuKPPigyBNPmIV5dzec5q1b18ZBKKeMxg91gpUg2tpEdu7EpRi5XKFD7c1qfPJJkW9+E1E1L319xXs00qFe69T8WMyU6fI4+djHPiZvectb3iIbwB5NTIicO4cMCw5xEsH3ZTScfYB4bQtyzFIgfIzCorcsmdmd1QwnoeMe1uEkSu2Jx9E/b9u24s9hhnWx8ulTp9BexK9MqKtr5RAYr/C41iWLzc1mIidF9I1mixYX4QfFYhB9EwksyHt6cL6pZWCpHGEwKGNHZGWPL/WNlKYmkxBQDNc1tsivfPrMGfRL96v26OgoLTSuRbUHy5kzGZzzveXNG8kWXbok8u1vo61CRwd+r+ZmUw5KwZABZgY1WFHmONhG9GsjEdM3kaKhHU
ylzWHFRrEMQi+2vbH9M69IqGxOolGIff39xZ/jusFDYaamRI4d87dFHAoTlNXY3b22IrXjYN0yP4+KhY6Oit9iw+hL60EoRETXdT9u33Yc59dF5H+LyGMi8uciMiki20XkBhH5ryJyt8/b/L8ictHn/od97nuriORFpF1EXioivyQiP+o4znWu6yZd110UkY87jnOFVPgjP/mkyL334uQyPIySnX37EIFczYFWygEuJRDaJxl1gpVaE42aPorFYMmiN5ORty9cQEnJzMzK17a2BvdoHB6ufigMhUQu3N/85jfLm9/85udtUljt0cICou3j43Bq6Rw7jikZpBjojWLTsWWTb95Pp5cZnOUMJ6EIyBJpvyxCtUdKtUQixqE+cKD485aWVpZM28LjsWPFh8KUKp8eGKgui46D11gCt1Fs0dIS7D2DQhcuYPuyDLC5udx3AtVmD6o4qKwljmMW2vv3F39eMukvMvJy6hRskvfcGouZgEcxoXFwsPIMa56XmRVt27KNYouWl0W+/GWR738f/iWHW/b0QDhlixsGM9krkT5mKgV71d5uypOZrUi/iSXNvM8Pu7IiqAeholQDRbj2dpGxseLPy2SCy6efesp/KEwksnIojF9mYzXVHtEogowsba7kvTaSvrQehEJEtHEcJyYi7xWRR0XkRd70U8dxhou89F9c1z1R5r/5pPW+f+04zqSI/N8icqeIfHoVH1tEcNL/5CfhIN98s8itt4rs2ePb46hk5mCQE8xrP0fYfo6iNAKOAyetu1tk797iz0uljBNNsdEWHR99FLf9pixy4mvQYJhiQ2EY5fV/LJz2KJPBxMGnnjLTuSkadnSYacp2jx8RY3soGDKrsFiU3JuZqMNJlEanvb10hnU2a6aZ+w2DOXwYf/sNhenrK14+zUuxigQK/P6PhdMWZbPIvOLAlNlZfP+enpXftZyqChUIlY1CS4vIjh24FINDYYqVTz/9NMpy/ao9OKAqSGz0Vns4jplMzPO/l7DaItcVueceiIjxOLZFX19h2SZ9I7ZVsHtB20PnKBimUiv/jy0MFssgVJRGoqnJ2IRiMCHE25uRf589K/LDH/oPqGpvDxYa+/uxNil2nqZNSiTMcVkpYbVb60noRERBimePiDzoV7/uuq6fGlwt/y74kXdV8yZ/8ieY8vXyl4vccYfJzmKpZCVOsH1bnWBlMxCPi2zfjksx8nmcrLw9Gnl98qTIQw+hbM5LV9fKcmlbdKQTnssVnKBCaY++9jWUbnZ2wlFmX9bWVlOmzAWCXzagLQiyrMkrEqo9UjYqsVj5GdbFhsGcP48M62JDYYpNnR4cNOKkJ+oeSlt08iRaKgwN4fbAgAnocKBDuW1XvBk66hspG51o1NiGa67xf47rwufxExl53+HDxYfC+AmNHADJMkdOH75MKG3RJz4h8jd/g+9x6JARUXt6zETl1lYjGrIKgzC46icOavagstGxE0J2BRyFHApTrIT6uef8h8I0Na0cCmOLjb29sIdLS1jPrYJQ2q31JIwi4iURSYjIjzmO88eu614o83W9juMM+Nw/7bpukYTy59l9+Xqy3A/px+HDIq96lchP/AROQPYJu5ggqE6wolRGc3NpsTGRKOxH5L0cPowTmg2P15e+FH1NLxNKe/Txj2OAzdVXQ5Rob4eNyWaNrcnnC6N5fsNJOEhFURR/BgZwKQaHwkxNFV5PTuJy4sTKAVUXLluZH/9xkW984/m7Q2mLHn0UNqilxQxTYaaC9mVWlNpBsfHgQf/H0+nCDGvv9SOP4G97KMzEBK5vuy38ftHdd0MkfPWrRa67Dj5Sby/EwpYWM1CGvhKDrGqPFKUymptxfI2O+j+ey4nMzWHdNTuLdla8zM5ijTYzs3JAVT6PY/izn13Vxwql3VpPQiciuq6bdxznj0Tkv4vIM47jPCwiD4nIvYKml5kiL/2PIvfvFJFnPff1O46TE1Oz/m7BjvWv1Xz2r35VR44rSiPQ1RWcReSH47znbhF594MPmuM4rPboU59SW6QojcJqbdE3vhF+W/SmN6ktUpRGYWAguE+jl43kF504obZIURqFoNLptSCsdms9CZ2IKCLiuu77Hcc5ISK/IiIvFpGXicjvisiU4zi/7rrup3xe9lbBJBwv42Xcd0JEfsl1Xb/XK4qyiVF7pChKI6C2SFGURkBtkaIoYUPtVmWEUkQUEXFd97Mi8lnHceIicq2I/ISI/KaIfMJxnPOu697neclDFTS+fLVgek5GsGOccN1iHXkURdnsqD1SFKURUFukKEojoLZIUZSwoXarfEIrIhLXdVMi8oiIPOI4zgOCJpU/JyLeH7kSvunXVFNRFCUItUeKojQCaosURWkE1BYpihI21G6VZqMNkv/u5eut6/opFEVR1B4pitIYqC1SFKURUFukKErYULvlQ+hERMdx2hzHua3Iw3dcvn6qXp9HUZTNi9ojRVEaAbVFiqI0AmqLFEUJG2q3KieM5cxtIvJtx3F+ICJfFpFTIhIXkRtE5C4RmRCRD/m87icdx7noc/9h13V/sFYfVlGUDY3aI0VRGgG1RYqiNAJqixRFCRtqtyokjCLirIi8Q0R+TETeICKjIhIVkTMi8lEReb/rumd8XveBIu/3xyKyoX9kRVHWDLVHiqI0AmqLFEVpBNQWKYoSNtRuVYjTyENhHMe5W0TeLSKDIiKu606u6we6jOM4ERHpE5HtIvJ9EXmf67q/t76fSlGUtUTtkaIojYDaIkVRGgG1RYqihA21W7UhLJmIEyIijuM0NchUm10icny9P4SiKOuC2iNFURoBtUWKojQCaosURQkbareqoNEzEXcJNij5htsAH9hxnFYRebF11zOu655cr8+jKMrao/ZIUZRGQG2RoiiNgNoiRVHChtqt2tDQIqKiKIqiKIqiKIqiKIqiKOtPZL0/gKIoiqIoiqIoiqIoiqIojY2KiIqiKIqiKIqiKIqiKIqiBKIioqIoiqIoiqIoiqIoiqIogaiIqCiKoiiKoiiKoiiKoihKICoiKoqiKIqiKIqiKIqiKIoSiIqIiqIoiqIoiqIoiqIoiqIEoiKioiiKoiiKoiiKoiiKoiiBqIioKIqiKIqiKIqiKIqiKEogKiIqiqIoiqIoiqIoiqIoihKIioiKoiiKoiiKoiiKoiiKogSiIqKiKIqiKIqiKIqiKIqiKIGoiKgoiqIoiqIoiqIoiqIoSiAqIiqKoiiKoiiKoiiKoiiKEoiKiIqiKIqiKIqiKIqiKIqiBKIioqIoiqIoiqIoiqIoiqIogaiIqCiKoiiKoiiKoiiKoihKICoiKoqiKIqiKIqiKIqiKIoSiIqIiqIoiqIoiqIoiqIoiqIEoiKioiiKoiiKoiiKoiiKoiiBqIioKIqiKIqiKIqiKIqiKEogKiIqiqIoiqIoiqIoiqIoihKIioiKoiiKoiiKoiiKoiiKogSiIqKiKIqiKIqiKIqiKIqiKIGoiKgoiqIoiqIoiqIoiqIoSiAqIiqKoiiKoiiKoiiKoiiKEoiKiIqiKIqiKIqiKIqiKIqiBKIio
qIoiqIoiqIoiqIoiqIogaiIqCiKoiiKoiiKoiiKoihKICoiKoqiKIqiKIqiKIqiKIoSiIqIiqIoiqIoiqIoiqIoiqIEoiKioiiKoiiKoiiKoiiKoiiBqIioKIqiKIqiKIqiKIqiKEogKiIqiqIoiqIoiqIoiqIoihKIioiKoiiKoiiKoiiKoiiKogSiIqKiKIqiKIqiKIqiKIqiKIGoiKgoiqIoiqIoiqIoiqIoSiAqIiqKoiiKoiiKoiiKoiiKEoiKiIqiKIqiKIqiKIqiKIqiBKIioqIoiqIoiqIoiqIoiqIogaiIqCiKoiiKoiiKoiiKoihKICoiKoqiKIqiKIqiKIqiKIoSiIqIiqIoiqIoiqIoiqIoiqIEoiKioiiKoiiKoiiKoiiKoiiBqIioKIqiKIqiKIqiKIqiKEogKiIqiqIoiqIoiqIoiqIoihKIioiKoiiKoiiKoiiKoiiKogSiIqKiKIqiKIqiKIqiKIqiKIGoiKgoiqIoiqIoiqIoiqIoSiAqIiqKoiiKoiiKoiiKoiiKEkhDi4iO49ztOI5rXWLr/ZmI4zhnrc/1wHp/HkVR1g61RYqiNAJqixRFaRTUHimKEjbUbtWGhhYRLd5y+ZKz73QcZ7fjOH/lOM4Jx3GWHcdZdBznMcdx3uc4zoj1PO4su4P+ieM4rY7jvPPye8w6jrNw+b0/7TjOazxP//XLn2myRt9RUZTGR22RoiiNgNoiRVEaBbVHiqKEDbVbVdAwymsQrut+3Huf4zg/IyIfE5ElEfm4iDwpIlERuV5EflVEfkZE9pb7Py6r0PeKyAtF5NMi8rci4orIbhF5pYj8rIh8xfpM91x+3R+t5jspihI+1BYpitIIqC1SFKVRUHukKErYULtVHaEQEb04jnOd4If9oYi8xnXdKc/jvyMi76rwbe8UkZtF5Ddc1/0zn/85svIliqJsZtQWKYrSCKgtUhSlUVB7pChK2FC7VRmhFBFF5L2Cz36X9wcWEXFdd0ZEfqfC92Qq6n1+D7quO17h+ymKsvFRW6QoSiOgtkhRlEZB7ZGiKGFD7VYFhKUn4vM4jtMqIq8Wkftd1z1Rw7d+5vL1zzmOE63h+yqKsgFRW6QoSiOgtkhRlEZB7ZGiKGFD7VblhDETcbeINIvI4zV+38+LyBMi8v+IyF2O49wnIg+LyFdc1326Fv/g1Clxt2wRaWmpxbspilIv/uAP3i3vfe975E//VNwzZ0Q++EFxJMS2SNCPQ1GUdSafL31xraP1ne98t/zP//ke+dCHxP3+90U+9rFw26KmJnFdV6SvT+TKK0V27RIZGxPp6hLp7sb9HR0i7e242H+3t4u0ttbiUyiKEoTriiwuikxPi8zM4DI3J/KGN7xb/vEf3yO///vi/uEfinP56aG0R2fOiNvWJtLfX+07KYpSDfm8SC5X/JJK4ZLJmEs2KzI/D1v16lc/b4sqIZR2az0Jo4jYdfl6vpZv6rpuynGcl4rIb4nIXSLyxssXcRznfhF5m+u6J6v5H7GYyMWLItu3i0RClwOqKJuTTEYkmcTfZ86IvPa1zz8UWlukKMra4rrlCYR+RCK4RKMiTU3mdiRigpBPPilyxx3PvyS0tuh97xP58IdFEgn4R4uLIqdOiYyOimzZIjI4KNLWZp4fj2ObkEjEX1z03lafS1HKJ5mEUGiLhtksHovFRHp7RfbsEensxH2ve13By0Npj7q7IYw2N5vvpShKbQkSB3mxA6f5vEg6XSgsOg78I/tYTSaNz7BKQmm31pMwioj8cWtu4l3XnRWR3xeR33ccZ1BEbhGRnxeRnxKRLzqOc73ruqnVvv/wsMi5c3CUR0dr8pEVRVkjXFdkaQknprk53HfwoMiP/MjzTwmtLVIUZfUUEwhzOf/sQeI4Rgz0ioP2JQjaot27RX76p5+/O7S26Ld+CyLhP/0Ttl9nJxYHuZzIc8/BZ9q6FcHXXbuQeZjPQ8iIRrFNXRd2emJC5Nlnjdhh09oaLDJ2dGBBoiibjWxWZHa2UDRcXsZjjgNxbft2ZAX39uJYEcFaZmkJfx86VPCWobRHXV0QK2ZnYQvi8Zp9bEXZ8LhueQKhFwqCkYg5r2ezxqdyXQRPHQd+U1MTjs/mZjx/dhbBx64uZBHHVq9shdJurSdhFBFPiEhaRA6VemI1uK47ISJfFPy4HxeRN4nITSJy/2rfMx7HDj41hZ2+p6dGH1ZRlJqSSsE5phhw4nJ3jDe8oeBpobVFiqL4U2l5MQnKHuTFWU2BjcXSksijj+LvX/ol/K/LhNYWRaMib3vb/8/em0bHdV1novvWhKkwFAYCBECCM0FSIAlqsqzBkiXZFhVJtmVLkWwnHmI7ev06K503Zb33OlFWenX6R3fHnRf3c/zctjw7jm3JlizZkqVIlmRZEyDOgykOIEiAmFHzcO8978eH7XOqUIWaC3UL91urFoBC1a1b956zz97f2d/eCBZ+/WuQfS0tuFZMKC4sEB05QnT8OFFnJ8jEvj6QGXxNu7ulBLquDs+HwwguQiH5CAZBfnB2uQq3O5lYTEc2NjYWfx9t2FgtCAHJn0oYBgLSpjU1IU7x+UAatrYm2ZnfH4PnJGcFp7zGsvaovR32YXYWNiX1u9uwsRaRTV7MG6ipYJ+I/SL+nR9CgDCMx/GIKTSa0wl/QCUN1bVX14mmpvC+5mbYqiLXZsvardWC5UhEIURE07RnieguTdO2VigF9HXCTe4r9kCtrdjhm5vD5LB3umzYqB6YJoLMeBy7WV4v0Q9/KBe2pib5WqvbIhs21hJKJS/2ePLPHiwFTJPoq1+V5JfPp343a9uipiaiBx/Edxwdhd31+UB2uN1Eu3Zh03VxEZmJo6NEhw4h4B8cJNq8Gb7UzAzRxASO6XCAVGxpQRZVa2tyPWrDSCYXmWDk3ycm5EaSCk0DkZhNPl1ENoQNGyVDOCzlyHNzIP84G8jjwTzr7ZWkYbZsXNPEvDxxAsfp6Vn+GivbI4cDGxVMJHZ12ZsGNmoX2bIH1WzAVDAR6HJh/U0lCFkpwGBZcjwOHiSRkOurpklpMpOGKxH44TDsmaZhvpaiNrKV7dZqwapuzqOEDjrf0zTtLiHEnPpPTdPaiOgvhRB/mesBNU3bT0QTQogrKc9rRHRw6c8TRZ31Erq6pKy5r8/e6bJhoxoQjUppDhfsf/VV1B7btInolVfSvu1RsrAtsmGjFlBo9mAp5MWVwpNPwhbt2EE0MpL2JY+ShW1RdzfR3XfjPp06BVJjzx5IlOfmcA9bWohuvhkBxuQkatS+/jrRb34DknB4mGj3bqJ160AI+v1E4+MyUPF4JLHY2ip/zwSWSadmMzLhOD+P48fjy99bV7eydLqpyW6yZ6O0SCSSCcP5ebkB6nBgzG/aJAlDdVM0FzCBOD6O7MUdO4h++9uML3+ULGqP3G5cn9lZbFzYqjEbVkSh2YMsL+ZN03TkYDbeQgjYIyYN4/FkKbPbjRjL45ES5VzAWdDBIN7b2VlyDuVRsqjdWg1YkkQUQrytadofE9FjRHR6KR30GBE5CWmoDxLRNBGl
3uT/SdO0OVqO7xDRHUT0HzRNe4aIfkNEM0TURdCrX09E/yyEKEnHHqcTROLEBHbO162zd7ps2FgtGAYcYl3HQub1Yo6OjRE99xyyXDItcFa3RTZsVDPK1ZykVPLiSuHIEaKnn0bdsenp9K+xui3SNNjaO+7APTtzBiTbzp0gLmZmQIo4HMgEXL8e3ZyJQJicPw8y8cUXQY7s3Ut04ADRe9+LAGZxEcfhYzEaG5NJRa9XEseahkCnoWHlYu2JxPJMRpVsnJ1F5kQqnM7l8mm7KYyNXGCaGNMqaRgMyv97vSDmfT48WluLG0eGgbmzuIjYpbsb8zUTrG6PGhthNwIBkBVqcycbNlYb+TYnYajy4kwEYSF2gjskM2GYSMj/8Wfxw+0uzPfSdazdiYTcACy1D2d1u1VpWJJEJCISQnxf07QRIvoLIrqHiP6UiAwiOk1E/7j0SMW/y3C43xLRj5A9o9oAACAASURBVIiogYg+sPS6LiIKE9FxIvq3RPSVUp5/QwN2uubmsEittBtuw4aN0kMIpNSHw7L+FpcXCIWIHn8ci93HP0703/7bSsexti2yYWM1kI0gzOQEWyl7sBSYnib6+tex4/6pTxF96UuZX2t1W+R2Ew0MEN10E34/cwZBx969IPG4ezNnNTQ3w49qb8f/77wTPtWxY5A7v/YabPpVV4FQHBoCManrklD0+/GeyUmcg8MBAkbNVMxGILjdyFZaKWPJNLHWZJJPT08TnTuXvvB8Q8NyYjGVbLSbwtQ2QqHkTskLC3IDpa4OROHGjZI0zDWzJxcwgRiLgaxvbESWcLYA3ur2qLUVtmZuLr9sKRs2CkUxzUnUTdP6+uzy4kJhGMlkYTwufTX2y5qbJWlYCn9MlS93dZU3i9/qdquS0EQ6L71KoGnao0T014QLTkKImRXfUEFomuYjMNMjRDQmhLgph7clXWwh4BRHIthVt6UtNmxUBokEAjjDkLIzXuh0neiJJ4jeeQd1uoaGiB599FH6m7/5G5peSgPq7OysmhymUtgiGzZKiWKbk6z0sEr2YCkQixH9l/9CdPky0Z/9GeSDa8EWjY/jOx86BCJx2zbIlFtaQGIEgzJz3OWCmsPnQ6AhBMg8Jh1HR/Hw+/HaXbsge963D4EOIxqVpOLiIjZ3OVhzuZJJxZaW8pF2sVjmjEb+PVtTmEzy6YaGtTV/rAomrpgwnJ+XknmnE+ObOyX7fOXNkmMCUQiid9/FOLzmGjl3VHvU1dXVZfE4bZktMgzYEU1D9mUtbVDZqCxK0ZwkU+ZgucqiCZEsSU4k5LqYqVtyqT9/fh7rHjenzfG7rrjS1SC/tCqwSibiNBGRpmluIYS+2iezhCMkC2GOFXIALgh6+TJSdNevt+sj2rBRTgghgzAuvK8Gg0LIoPO660AgquiSmjZXLdkiGzZyQSnlxez8quSgTXBICIGmTufPYzNj+/bk/9eyLertBSF44ADGxrvvYswMDuKxuEh04QLGmtuNDo0zM8hi9PlkplZ9PdHtt+P6XbiAWpKjo5CHaxqu6YEDRPv3g5SprwchSSTXCiYV/X7cCya/uYs0S6Gbm0tDMNTVyWApE7gpTCay8dIlXL90TWFykU/bTWEqB8PA+FJJQ67NTITxtX69JA3LIeHLBM7Y1TRkyvr9IPNV8p2xZI+may1OczoxF6emcI86O0t3cjZqA5w9mI0kLEVzknJ/D7VTcjyOvxl8jkwaFipLzhWJBEqBsHy5tbUsH1OT/FKlUO2ZiFuIaIvy1POiSk5Y07SbiIhzBxeEEG/l8La05x4OY6eLpTn2TpcNG6VHPI4AyzQRADY2Ll8Ax8aIvv1t7PT/yZ9IefPZs2fp7Nmzv3/dnXfe6ahFW2Rj7aIUzUlWetjIDy+/TPS97xFdfz3Rww/LzY61YosiEZCHpkn05psgAXfuRFZiWxts87vvSrKwoQEBh9uN1/l8IBaDQQRinZ3oJFtfj0zHkRFkm1+6hM8bGAChODycvuMskayfq9ZX5KxATQMZp9ZXTLfGVApcriNdJqP6t1q7ilFXl10+bStn8ocQsiEP1zFcXEwmprnpic+Hcb5ahK5KIMZiKBHQ1wcSX4Vqj+688847ydpxWsbzDgRga3hu21gbKEVzkpUeqwlVlsxZhqosWa1hWCpZcq4IhWQNZN7gyxPZMhFrjV9aFVQ1iViDyHix2Zno7Ey/y2fDho3CYJqyjpbLhUAonWO+uIjMnytXiD73Oez+rwCr50zZhn+NQIjkDEJ2evPJHrTlxZXFuXOow7puHdEjj4BQWAFWvwMZbdHMDJQaDQ1EL72Ehg47d6LDbGsrHrOzIBN1HTY7kYANr69H1lRXF7KIZmcxD1pbQRK2tWHsqpLnc+fwuevXg0w8cIBow4aVx3gsllxf0e+X2RtO5/JO0LwxVS3I1BRG/TtbU5hM8unGxrW9gRCNJkuS5+claetySTkyk4bVQswmEiDNOHP87bdxP6++Ouv9rFlbRCQbJJW7JpuNyqAUzUlWkhhXE0wzuYZhPC79PpYlq81PVovgLEK+nAqr2yJLwCYRK4uMF9s04czG46i7YS9QNmwUj2hUSoMaGxGMpkMsRvT880Svv050111E73lP1kNbfYGyDX8NoFB5sZ09WL1YXCT6r/8VweoXvrBcxpwGNW2Lzp8HodHcTPTsswjkBweRFaVmBV24gKxCrxdZhRMTICG9XjRW6e6GJJP9rLo6kIldXXJTaX4e2YkjI0SnT2N+dXRIQnHLluzzQgjcO7W+YjAog9G6uuX1FVc7IyUb1KYwK9VrzKUpTDrCsRYaVhjGcsKQyVdNwz1n0tDnw3iuxg0YVmw4HLg/b78N4uG663IiwKvwG+WFFW0R17E3DNgTW/ZfnSi0OQlRbtmD1ThvVQghyUL+mSpLVglDl6s6vlMigTVb10uS8VsF36j2YZOIlcWKFzsexwLlciEjsRYcKxs2VgOGAUc4kcA88nozB2qGQXT4MNFPf4rg9MEHc1pQrb5A2Ya/ymE3J1l7MAyir3yF6PhxSJhvuCEnMtfqd3NFW6TrRL/7Ha5DQwPRL34Bcm5wEJma7e3YIHK7YfNPnQKhtWkT/KhTp0DktbWhxm13t+zGzNlWLHVWG1QEg2jsMjqK+6HrIH6Gh/HYuTN3EsE08VlqfcVIBP/jOoVqfcWmJmvOUd60W4lsjMWWv4+bwqwkoa6mpjBC4H6qdQy5+QgRxpHa+KStrfqJYiLEIIEAxnVLC9HRoyDeh4ezZkMzquQOFYysfpGuyzht3brqGZNrBcU2J+GfLlf6jEIrQq1jmEgky5K5BrVKGlbjmA0GUS7A4cDGXQky9qvwW9YebBKxssh6sQMB7LQ3N1vH8bBho5oQDssALZfaTRcuEP3oR3AqPvvZnMsJWH2Bsg3/KqEUzUnUh9qgxG5OYm08+SRIsve/n+jgwcyZ0ymw+h3PyS86fx5ERiKBjMRIBEReZ6fsUOt2Yx6Mj0Pi7PGgaYphoKZbKITMw6EhBCqhEAiBmRnMuZYWkIk+X/I8ikbRjOWdd7DhFIvh3uz
dC4Jlz578gx4mbNT6iix1dTqxDqlS6FpRp+g61uiV5NOh0PINEocD9zgb2VgOnzkSkTUMmTTkTCa3O5kw9PmqT7KeC7grOBOIFy+CvN+2DZm9OaLmbRERxsPMDMZbe3u5T2ltgP2iQuTFtZA9mCtYlqyShqosWa1huJqy5FxhmjJru74e86lE51wjd7y6YZOIlUXWiy0ESMRQCJPJ660d42fDRjmh63CCdR1OfFNT9iyemRmiX/4SROIDD8BhzhFWn5W24S8D7OYkNgrF6CjRt74FG/Txj8suwTlgTdgilif39+Pn888jqNy+XZI3jY0y08IwQPwtLKDG4b59OMbx4yBM+vogc25pwZoxNQVCMRbD+tHdjXuQmm2YSBCdOIH79c478NXcbhxreBjEoprRmA8iEUkqsgyaA0SPZ3l9xVqVU6pNYVaST6drClNfv3Lnaa93ZZJP15Mbn8zPy+Y5DoeUJTNx6PWW5xpUEpxB6naDvF5YwPhmwj0PrAlbRCTnaXs7xpWNzMhGDvL/U5FLc5JaVlaosmR+qNdJJQvdbuupF+Nx8B0lki+nokZHRXXBJhEri5wutmHAodV1u4CvDRvZwDWoIhE4FF6v7GS6EkIh1EB8+WWim24iuv32vD7W6guUbfjzRCoZmK5BSTrY8mIb2TAxQfSP/4gg4BOfyKkOogqrj56cbJEQyC6Mx4k2byY6exa22+FArUKfD4FIQwMehoGfExPIQtQ0EH0bNyLD6tQpvGZgAJmEjY2yqPvkJAgCljp3d6cnCkwTxxoZAenCcqzBQdRQ3L+/uMCIm4KpMmi1yYkqg25pwdq3ljYb4vHs8mlWJajgpjDcPdswZLBuGCAZ6+pwTdXmJ62ttXd9mUD0eDB+4nGiN96ALbrmmryJ6jVhixjT09h0WLcuN5+zFrGWmpOUGyxFVrMMGU5nsiTZ7ba231gG+XIqLHx1rAObRKwscr7YkQgYercbDsxaXaBs2FgJXATcNGUGQi4LayJBdPIk0VNPIUvlk5+0neW1Crs5iY3VRCQCAnFyEvVYh4fzzihYM7YoFiM6cwbkT38/ara98QYCkI0bpXqjrg4ZVVwT1+lE1uDUFAKWAwfwmpMncTwiZIAODspgJhxGZuL0NOZ/czOkzu3t6dcYISC5Hh0FqTg1hddt3SrrKHZ2Fn+xdD2ZVPT7sQ4Swd6wDJqzFnOUxNcsTFOSitPTmGeTk7JjdziMccVyafXR0pJdPm217B8VkQi+v8eDcWOaGLvBING11xaUYbdmbBERrtfkJH7v6amt9X6tNycpNwxjebdkpmMcjuVZhtUuS84VpokM70gEMVtHR9nmzRofYZWBTSJWFnld7IUFOIi8w1yr0hUbNvIFBwaxGBbXfDo8CgH58jPP4P2f+AQyfvOE1ReoNWH4i5EXq7UG7exBG+WAEETf/jYC9w9+kOjGGwvKXLP6SMzLFs3Po+Zhdzc2WFlWrJJ87C+1t8tGHi0tyEo8fBhE3M6dIA2jUWQqXriAOb9zJ9GOHdLf0nVJPsViCOpY6pxpzRGC6PJlnNvoKOrLERFt2AACc3gYm1elsiHRaDKp6PfLjQ+3O5lUbGmxNvGVK+Lx5E7Jc3OSbHU6UXNczTJ0ODJnNfLf6ZrCeDyZpdP8XH199a0XrN6oq5OS7FOnMLeGhvIqp6Ciyr5l3sjbL4rHQUjX1RXkR64KCm1Okou8uFbIrlJCiOTsQlWWrGlSiqx2S65FqPLltrac688XCqvbIkvAJhEri7wuthCo/RONyno/tbTTZcNGIeAC4EJgTuTbtXFigui3vyU6fRqB+/79BZ2G1RcoSxv+UjcnsQlCG6uBF14gevpp1Ot7//tRp68AWH205m2LxsZAlG3ejKDrjTdABHKTla4uWZewqwtBi65jrXC70XX54kUEMVdfjWwIvx+ZjZcugRDYvRsyafa5hMDG7uQkCDtNw/t6erLXxZueBtE5MgIZthAgIjlDcdOm0tocIaQMmh/BoPx/Q0MyqdjcbG3f0jRxT7iG4dwcSD9Gc3Ny85OWlsK+r66v3AwmW1OYdPUZ1b8rRcCEQogrWL1BJOuFbtyYdzkFFWvOFhHhes7NyTm1WihXcxK1gZuN7EiVJKuyZJcrufmJ1WXJuYKbiHGJkAqoK9fAVV192CRiZZH3xU4kQCQKAeenGnc0bdioBAwDTjtL1Lze/J3uhQUUxX/xRQSJ991X8OlYfRZWreFnGY3dnMRGLeP0aaJvfAMk18GDyIArcG1fc7bIMFCLkOXCQhC9+iqe6+sDSdbZiaA+kcDvLhcyr7j77PQ0sgTDYRxjzx6sK7OzaMgyPQ2CZc8eECvqvYlEpNTZMLAW9fSAVMx2DxcXQSiOjkJObZrw7ZhQ3L69PHbKMJJJxcVFmVmnafgOatMWrhdYbRBCkjacZbi4KDeN6uuTG5/4fJXN7OEazdnIxmxNYTJlNRZbOywYxH1XCcRAgOitt3D/h4eLuu9VOGLyQsF+ERPXnZ3lKSEgBEjsbCRhKtZ6c5Jyg2XJKmmoypJVSbLHs/Z8UFW+3NAgs74rAHtEVwA2iVhZFHSxg0E4SSw7sBut2Fhr4No9RHB8C5kDkQiyQJ57Dsf45CeLcvasvkCtiuHPRAoyaShE5iLcdvagjVrB3BzRl7+MoPDee0FUFbGuW33kF2SLwmE0WmlrA3Go69gcunBBZhB2dCBoiUalfCoQwPubmxFAHzuGuogNDVJmTISMQ+7u3NoKiSf/j2EYUuocjSJQZKlzLpkW4TDk1SMjOI9EAmvT/v2QPQ8Olld6HIslk4p+vyQimGxVMxZXozZ3LJbcKXl+XhJwLpeUJTNpaJUakNwUJpN0OhRK3xTG5Vo5m5EbxqQL1JlAbGiQmbq6jkxe0yS67rqi7/GatEVE8Fu4IWZ3d37Etd2cxBowzeXdknnzgmXJKmlYq7LkXBGPIwnKNLGGlFm+nAqr2yJLwCYRK4uidrnCYUzCxsa1UdPGhg1dh+Or67J7YCFOkWEQnTtH9NprCPo+9jFklxQBqy9QJTX8dnMSGzZyQyJB9E//BDntwYPoGNzRUdQh16wtmppCRuCGDfCNdJ3o+echSd61C9e6vR1ZgoEACJbOTrmmNDTgufl5EHmLiyAk9+8HqSsEasQdPYr3dHaCTEzXIIWlzgsLsGv8ubkGTrEYiMTRUcito1FsHO/dC3LzqqvKv4HMWXQqqcilQ4jw+aky6FJKcA0D108lDXnzUNNkt2QmDJuba3vzSG0Ks1IH6tS1VdMwtplg5KxSjwe2prMTz7tcGGtzc5D1l0CKa/W7UZRfpOuwR04niEQiuzmJVSHE8m7Jui7/z7JktY6hfT8kAgHYcpcLNmcVNqDsu1EB2CRiFmia1iSECGV/ZU4o+GIbhixI2tICB8EuYGujVsHBTCQCEqmpqXAZjxAI2E+ehMP8nvcQ3Xxz0ae4KgtUCe1RzraomOYkai0dO3vQxlqHEEQ//jEyf268EbUQN20q+rBrxhYte6PA5lAkImXAiQTRr34FgnFoCESLz4dNo7k5rCPr14
aaxIzK7293rhO/H0XrPPA7+3sSIxGz4ffIIHHgAp9sY3Fu42cDoReB49ijWqqoXboLNf198vKpUiEfhfLCNidq/Nbm9Op3FPSxEgySSqbngacyG0tUEe5eRJ+Bm/+IUYmNfTI7QbWeM1EkEVz+qq9erI48dhj3buzKzK8njEZ778MmzO+Dj80e3b83dGTE+jJbuvD0OrGr5LA2bQ3CwqtFtbK/eNuFttbk4UfXg8sF0dHbnPparC5skyYop8f3M6oOsgqVi7kO2U0c5wFXU8jp+J8HddXfhb4zpNJIRM1NIS4otyquGnphBDXX89yDqe9Dw3hy+emN3eXli248ABSFNdeSXRTTcJ8pPlXXiqs5VnYWICdqq7GwkPs69VFNGqzHMPuMPR48l9nxdfJPqXf8Hx3X9/dbtAGiiMBonYgC1wOEQFH7cHy7KYdsebQU9P5uRmFrXXNKFBx1mS2VkhBkskphkS4TtXQaZSlbeZssDv1BSc21QKG8DQkP2tx5yFM7Y6j44iKOjpyRUWLgepFKoduYrOiOeeg6HnzccIVRUtSrt34zhYy3FxERV/27bh5wMH8NXTAzJxeLhwIKbr+Mx0Gn9n9XryBMlwGAHI449D/2LzZmxWiYRoSw4Gcd7lOEGahvshSWJyWgNrH1aIRB6uomkNfcRaIJEg+u534ezdcUfmugyHhQh7tqPLg5MkCUFzudPbH3gACYfXvQ5tmHxMioJjslp1oWkgTSIR2D2eWGo3ZJnob/8W5MW11xL9r/91egd0nYngqjcmlI3JtlqAW94WFrAOuJWfA0CjnpYR7e1CaqO52Xwiz+EQ0+pVNX8iLxhEQPnjH0Oo/uabC+/rDocIFEdH8Z5mqmSZQE2nM5MI5dhiY3tzOFycwNM0+A2aBltQKpHpcOC4BgbEELqJCWgotrXBLzp0CH93883wF0IhfHFVZamqnvFx3PtNmwpPvPb7MWhl2zYQmq+8AnJzxw7YRj6P+XlUJ3V2YghcI0nWgFmwll8kAjKs0r2G5a4mJvAc7tiRGcMZkUxiDej62mpfTiZhZ1VVVFRyIpN/F4vBjhGJAotsktGIYBAxUDSKOGpqSrzO7HlPTSE+Xr9exHc9PfjiIhDWnV9YEBJk7e1iz3vuOSSKzj0XyVWugufETDQqElk81blUEnd0FPastxcJkVL2PJ0WX0wcejywd/mIQyI8I7/8JdH3vgf/6wMfWFPDLs86NEjEBmwFDy/JRyZy6XZbG5yeEydg5DZsgPEzGl2XCyTb8jKczNFRGBmjNp/bLYRWnc7ySvDZiWVHlifx9veLCVpckcjOql1wOoXhj0ZFBml2Fsa8r68846hpOB+u/jQa8vl5ZJ7yCW3rOu5JPI4NwLihHTyITekNbxD3gFudx8eR+X7mGRw3E4rGCobpaVzLoSFzwwsURWgYrq6C2JRlvDe39+zbh9bIQKDy9gsiPAuzs/jsfKRGA2sbVolEnvTe0EesHjQNzl40SvSe92TalFQKiZNgMP9wBp52vHFj+e2mDz8M0uCaa8TEPknCZwcC1pMZLKcQiyHoL0cT1gxWVog+9jEQFHfdhSnMa2ig0xkFrn5jEopbnWuFQACfubqKpB0H79xiyJqd2fvRwAD26snJTCKpFDgwM7aJZaO5Ge3MDzyAacO33FJ4P3Q44DO5XCAMNM0cIRiPi4QqD6zr7y8vgcrtzTw0IJXK3xZ9/Dj8ub17rQ1RCgahGz05CSLxl79EJaCmIbl5ySViGIGqipZnlkRxu0UgbhygMD+P8163LtO3LYSmJnzW9u2way+9BHu0axfe/4knYCuvusoen6iBsws8KJOre8sZNKYoIK6WlwUJr+uwO/lIIWP7clfX2mhf5spCrtju6MBxSRLWdDyO33HxTEdH8YFL+cAJhtVVxMETE2J4VrEq5lAIydWenvz60G43rmNXl5jyHA7jfoRCOMaFBdiwnTuJbrstVzKDW51lWUhGRCKiTT2fL3LyJOxjfz+SHYUG6iiKaFXmJL7HI76K7RuahsF8+/fD3t5775rTqT/r4NA54mqgFjjrLraqiqCNDS4RjHAqBYPscsHwFAoUk0k4f+EwjGa29k4yic8xO2GZCEaR24FYvHdoCIY324ixxp6mwWDlGyBjF1IpHBPrBQUCotXZ7AY1MyOGjmQH7d/9Lq7VXXflOutjY3jtpk2Z+l7HjkHLbN++wpoT8TjIxLExEKHcJjg8jO+6jvPIV7WjaUK/kL84s5dO45hcLmTMIhEcy/AwCES7gj1JwnE7HLn6kQVQ77TTGWuLjBOYSz0fXI3Y0EesDh55BNqqb36zIPGIcM1HRrDutm/PtUULC3Cq+/rMaa3lw/79qFi+7DLRQp1KCfLCaoCkKAjYE4niVUOVYmwM7TkLC0R/9EeooCxBrDRskQ3ghKHPh2qMWicWFhbga2R3IoTDWCcdHbn7kiQheGtttT4V3KgNW8hvWlqCRmIggEq7UkT2yIgIJLdvL3wNWb7G74ePIstI4PG053KnpBMJ3TKXK7O9eWYGycjh4co0RVMpkPuHDqGa5+KLid75zvznqmkgI6JR0Q7JhLCiwGfq6IBvU87zFgqBSBwfx3OwdSvRW996WhMODVt0BoDbcrNbcYtBVUUVrq7jue7tBTEUj4PE4qnC/PdrrX2ZNfjTaTFsjddwLIafeTAnV4DbUe2r64iFQyFcl9ZW0bVnRDSKJEZTE+IxK5+taXj9Cy8Q/cd/IKa8+WbEvDzpuZAfzImRaBQ/c5U1J+BOnIB9HRzMTSDpembFoZE49HrzD/rKB0ki+tKXUIn9+tcT3XprydfVuy2qCzRIxNrirL3YnFEPh2GEWlpEFQdXqXV355/MSSSEp5eXQfYZNfVYs4Ko+KAVTUPGZ3ISwQIPcxkaKl1GruuCTNR1EYRWi3zQNJzr3JxwiLnVuRjBtbKC7Ha+KdgPP4xrePvtudMUeXp0f39mdmtxEa8bHDRP2iWTIADGxrC5LC0h4D7/fNy3ri4xjIerVRk8mIfv4/g4rvf27TiW/ftxLNddZ1+rDrcVuN04f5OZ0HrfoM5oW8StEWaIRFUVLc4N2IdXXiH61rfQ+veWt2TeB6663rgxVzJgdRV2o70dwX45gcWBA8i0n38+WjOJsAdFo1jnVlu1FAXJlGQSx1StttdnniH6y7/Es/jRj4KgMBHENWyRTZAkIXpf66BW00Ck6Tr2IaN/s7wshpBk70+Li1hLg4PWiW1FwfsXC+Tm5zG1ua0NGomlnsfRUez9fX35W9piMTEUz0gWKgrOX5bhv1TSopZOw8fTdfh2ioK11d6OScWV3tdkkujLX8Z5Xnwx/JPdu4tfG9b+jkZBFhw5Aj/ywgtxXPmGNJhBNEr0gx/Ar920CT7tnj2nTTu1YYvOEEQigtAq9lzqOnz8hQX8fVtbfkmmlRWsfY7xlpdF+3IlSQM7wPEp6xk6HDgXLh7hIZzNzZmVxHZD08RQLU2DXeAuPVmGpILTCRtWTgXexATRN7+JfeT222HHwmHYS25nZ0IxXxzEw2UiERyPwwHyMB7
HkK9zzhF/x9WGXBDidGZWHFrB0hIGqMzNEd19NzQcTaDebVFdoEEi1hZn7cXm1lRj6zGXzrvdMA6rq9iw+vsLZ3VHRmBQWK+QNx+uZuPR70akUqJlmdvYhobEREQrYCOaTOJnvx/HUE0CIhbD9eFNt70dm3R2ICtJoiQ+W2T22DHoG116KcS/jQiHQS62t6Mtia+9JCF4cLvRxmx100okILq+uIhNZmwM140rT9evR1UAt/gYp3pLEo6ZCcSVFZACPT1EN9xgXyXo8jLeOxCA823hPtb7BnXG2yImEkvdU10XGeYGkWgPlpaIPv952On3vCdzvcbjIAlZ+sCIRALkI8tKlHM/nnsOduvccwV5yZl0p9Pc9FcjUilUwqdSyLJXa5LvAw8Q/eM/wjb+2Z+hqsikzW3YIhvBgZXHg2e0ljaBBxGwFjFD00SFT76uhNFRrJ0tW6y3A5shEqemMIytpwdtzqX234kJVMZ1d6PVlq8htxrzfp8NTQNpGY8jkK1kmB1X3sTjYpDKJZfYo5/94x/Dn3zzm2HrTpzAdT/vvNwEbTbSaVQTJRLwf1Ip0RoZDIoKHzM+TiJB9OijONdrroEvc+QIzrm3F2RitSqmC6Bhi84QaBpiMqdTDKEyQtdhJ+fnxXT0desKV8HqOv52aUkkELq7T2/bvaqKgSiJhDhOIlEx3NxcvDilGlAUoUdIBFs4OYnrfP755ZGus7MY0NbSQvQ//kdm4Uw8jnsZDgsitblZDGbJZzOTSSRmpqYQb27eDL+Nh3cRidkFHk/593l0lOhzn8O9et/74BeaRL3borpAg0SsLc66i80ZHkWBk8Xl36kUiCJVFROYWBuCSb582S9uewmH8X49PSKzxZOd2GhFIqJlWdcF8djZWfmGwHqJyaRo0672RpNO41x40/b7RYuwroOkczoRmBuvXSRC9O1v4/zf+tbMoCiRQDuM349MOr9O09CKuLQEAtFM5Y2qimpNribSdVxztxv3ZHVVtGuzsPvQEJzpDRtw7xIJEIgOBwjESATVkO3taO2zQwND13EMPImujGCl3jeos8IWmSUSua25oY9YOVIpoi9+Eev2vvsyKw1VFWubCGvbaKfSaSQziOAolrPOjxzBJOYtW2DrXC7cV65Ksjp1UpZBICoKSL1q6KRqGq7X974HrbYPfxhEhIVsfb0/sWvOFiWT2KtYD6uWRGI8DsKwtTVz7SiK0LTK3q+41d7ttj6ghHWqeHpzIYyOIpE3MIB9uFTV3NQUjqmzE0Qiy9o0NZWWCwmF4OOxXmq511/XUZU8P4/gm32RSnDwICRVrr0W50WEY332WXwfGACZWIgkPXwY93j3blERLUmiXdA4pIF10/LZgmQSUhGyDAKRfTSeHH/0KH43OIjPqlbyIwsNW3QGgdt7eagIIxJBskOWsUbNaIizVuL0NJ7pbdtOX9KWCdLFRUEeBgJi8rLV6erVAsd8zz6LWOXSS1FtbPW6hUJEX/0q7Mi99xav8pYkQShyh18wKAhFvx/X78gRHNvgIGz66qoYQNPRYY++5TPPEH3ta/jcD30ov3Z2EdS7LaoLNEjE2uKsudhcsSdJohQ8X1DIba2aBoOjqnD43G5UquV7DTtbPLSlqQmOm9uN95qdhXFLJESL6uBgdUrmueSdS7t5qEw1iQhub5qbw8bicIhM4I4dmZUImkb0/e/j7++8M9ORTKeh66PrCF6N1/rZZxHQv+pV+cV7ub3bqGPIGSxVFRPBtm/HhpV9TXgKMusoJhJiOpiqwtk/7zyc389/juv6hjfYs7GrKj47mRR6IGWg3jeos8YWNYjE2kHXYW8OHcIwkK1bM38/NgZnc+vWTHusaahATCZhw8qx1SMjIOKGhoje8Q5BFkSjCGBaWqwRCKzFq6oIeKoxMVKSiP7mb6Dd+LrXoUKgjOqMen9a16QtkmUEUi4XiLBaBrzLy3hue3oy14Is43c+X26FWTSK9dXdnalpbAZmicTjx4l+/Wsk/cxIiszOwo9gX6C11fwezkG+1yv8O6s4dQrk5+bNWP+aJiqLysHoKDQizz0X528ED6Y7ehTXcc8e+E7GveT4cZzTjh2FhzLJsvBx2afy+0WFos8HMnb/fvhHV1+d/70UBZ937Bhs2PAwjrvKraMNW3SGgQsDgkHhOycSeA7XrTNHTksS7BYR3icWw/dqDSYrBFkW1ZCsydrejnPgtbXWMDICu86SB243bLxZ3d5wGASipsG/sHLNeQ8Mh5H4IMJ+ODmJ3+3cKTr63G7ElMbiGh4mZTUhrOvonOOE8H33lZXArXdbVBdokIi1xVlxsVMpIULLwtnFjJ2uCzJR17FR8WQvznJk//3ysqhi5Ko2FqflDWrz5kxtoWpCHZY2XQAAIABJREFUUWA8Uyk41kwmVhvxODJCxuBh3TqxwTz5JNFTT6EFads28TrOiCcSyFIbDfTYGILa7dvFMASuHDV+selwu0Ulpt+P+yHLcFrNOKy6jkzZ4cM4VknCebS1oS2qvx8TxOxwflMpBDaqiuxpBcRAvW9QZ4UtYpglEotNLG2gNA4cQJB9ww2ojjFiaUkMXTC2auo6Wh/DYZCL5WihjY9jal9PT+bQKB7g1dRkzZGVJATgug67WY3AOxRC2/Lx40Tvehe0Gzs6yiJLGraoSuDhb04nSLta2QVu/UulcnV6udI/GMxdKzMzWGcsE2IFmgb753QWP8/Dh7HOt20jevWrS/t2IyMg1np74U9Yeb4TCTHwbGDAWpAfCqFteGAAwS63N6dSYgq2lWRROIyBBO3t0BMrdI3icUgqLCwgYL/wQnzWxATs38aNuXIzhcCVYNGoqApyOnEPZBnVkKW0D2UZCZqREfx7yxZcjypNNW3YojMQi4uoINQ0UXlohsTSddgqrmbs6sL6j0axntrbrdspq2BSPhTCsWgaSMO+vsLaf2sF09PwjYaGoDeYSMCuSBKuZ09PcRI3GkUlXyIBArFcnVRFgV2bm0N1YCiEPWb9esSdPJ2an4dUSkyoZ9kvnupc6plRFGhpP/kkKi/vuafse1Tvtqgu0CARa4sz+mJrmpjqxXoSVhY/D0iRZRihpSUYnP7+3Gox1oxIJODkHz0Kh2vTJmSA2VjVekpdOo1roChiulc1R9BHIggcmpvhXHLg4fMhINi/H1WGN9yQ+bpjx3D9duzIrGgIh6EnFgxCO5GrDZlccTqxIbCmSSCQeY+np/Eeg4PWKvxWV+Hk+nwINk6cQBYqGsX95Jbn4eHyhxokErg+/ExVmHWs9w3qjLZF+WCGSGzoI5aP8XGir3wF5MJdd2U6i1zVFwwiiDVichLrcsMGrH2rmJmBXENrK6akMuHHtouTG2aRSOBYWU6hGsmgEycwOCUWI/rDPwS50tFRNknVsEVVRDqNpGWtiURVRcLL6cxt6eUArbU1MxHGpB2331slpJlILDaxmQgk2bPPop33iivy/w3LCCgKfLoTJ7A2zzvPml/IiT9FMdc2SYQ1/PTTWPsXXZR5LpKEa5c9vbkY0mlUWMfjRHfcYa76anwcFdmKIi
Rn+vtz7Z9ZKAr8pEcfhc077zz4RVztUyrRkUgg4Tw+jnPevr28Z6QEGrboDAJrtHJra1cXYiwzNpBjtFQKz2dbW6ZPEArhPXt77a8A5DXOX1w5yXryp3F6uWksLyNZ0NWVO709FhPFGn4/rmF2QYQkgUAMh0HEmU1cMHgoSiolJiwfOyYqEAMBxN7RKPYNtxv3mCs7HQ6RuOFuEB5q19yc/xmKx6Glffw40S23YJBXBV1B9W6L6gINErG2OGMvtiyLrAOTS5UIYnO1G1eNDQyIAJNbYUdGYEhbWsTGxqLd69YJo3U6StRTKRhErpZsarI/4yXLcAh9PgTgDoeoxhwfh9Pr8aCN2TiEZnwcZN/wMJxableIRNA6LMtEV12FzYmnUHOloc9X+L6GQmIytBUyYGUF2bZAAI5tOg0iU5bRTh2J4Jjn5/H3bW049o0b8VlmEImI1qhyBurkQb1vUGesLSqGBpFYHUSjRP/8z1hf739/JvGm63AK02msb6MdXFzE2u7thQ2zisVFTBv0+eAoc1UD21+v11q1cSwGssPlwrFWY+948kmiT34SjvSf/AkqDCogEIkatqjqSKeFyH1nZ+2GASST2PeCwdy9bmUFv+/oyFxvsgzfKBjML0VSCqoK+1eKSDx4EHrKF1yAajsjNE1Md+V2tuVl/H0gYH26KBOqLEGSPdE9+2+feQbX4ZJL8icBuMJPVc21Nz/0EGzYm95kzU7JMtb7k0/i/t16q9Dwtgpdx/vMzOCad3eLwTG6jvvFAXqx7p9IBOTE9DSuzc6dsEE27XUNW3QGQFFgd1ZW8BxxtVk8Lsj3YuDiDiLYy3zri6utVRXxWiXJGS5AYdJQVbHGuRuurQ1f1SzosBOxGKqoAwGiffsKXxuWfEinYe97e/EaWSb6xjcQJ991l5iaXAxMFPKXruPeezz4+eWXcY137cpsiWZbHw4LXUR+RnjSs9MpYkxudW5qwt/wPVlYIPrMZ7BPvOtdqEKsEPVui+oCDRKxtjjjLjZPt0qnRZbBrkw96w1OTcH4sKaVcRpYSwsMUW+vGNzB7S89PTBQPGjldIAJOtZ8ZE2LSqFpaDvWNBBq2e/5wAPIGl19tfj71lY4B9PTMOx9fWLKtK4TvfACDPgNN8BRDgTMO5arq7hPbW3WMl5LS9AZampCBZOiQAsjHodGmNHhTiSEhuLsLI65qQlk4vAwziff8S4tCZH2Qn9TBup9gzrjbJEZ8LNOVPw5aOgjmoeqIuM9PY3pedktM9PTcHTPOScz+IhEQNi1tlofBkGEIOUb38DP99wjiAVFQXDN+5FZRKM4Hq8XtsjugEPXif7zP4m+8AWc74c/jD3KhsEd9f6E1oUtUhSh69XRUTufIhLBs97Rkbl+dB17m6LkitgvL2Pd9feXR1qZmdhMRPTYY/AzLr0UHQ9EIqjUtNyhIOEwqvO8XhCJVqp8mXTgCsyenvzHduQIfMDzzy8+mVjXhe6g15t/Ai0RgvnHHiO67DIh72IWkoTzXV0VHTabNyMQt9ql8/TT8H/OPz+zmpE7gLg6VdOEDnlLixhmmI2lJZC6i4v4m9270Z5Y4X7XsEV1DFXF87C0hGeusxOxFccXLFXFQzKzwRObY7HM9uVCSKexpj0efI6VZ4+HW/J0ZX7ueV15PGIgzFrUOiwEWUYsRoS1XsoP4cKRUAj3LxAg+sUvkGy4447i04yZOOQJ8Ubi0OvFvUulYAOTSXSFFUvgsE1lHUXW2TUSilyhzkVHPh+ega9/HffvvvvKr9bOQr3borpAg0SsLc6oiy1JYrKVmYl75WJxEe0z3O66Zw+qRNrbsXEsLeHveGKhLMOB5qErnZ0gkE5XVREPIUkkhNEMBisjW6enYaw3bMhtYzl0CC0vl1+OMvhoVExKHBmBIedWGG6DOXUKDuVFFxXfdPJBkkAEBgIg88w6AouLIAR5UpuqIuO/sgIis9gkLllGG+TYGM5NVfH8bdiAYxgYwHEsLMDBaGuDQ2MjIVTvG9QZZYusgIlEh6P488BEYkMfsTgefJDoiSeI3vY2ZM2NiERgW7q7M5MLkgSdLq8X9sbqNY5GQSDKMlqYuUqLCQx2XM2u99VVVEP7fLBFdhNEikL0uc8R/fjHaP/8gz9AcNPebsu+1LBFNQLrNet6bYlEniLa15fpZ2kagkci7G/GdTQ+jnWyeXN57XtmiERdh69x6hS6F7ZtK0wgMiIRBKVud+EpxsWwvIyvQAAkqXH9TE6iYnDzZvgBZsAVTE4nbIbxmGdmIKuycSM0S634D4oi2pn37sX5HjkCHywQAEFgdgDO88/jdbt3o3KwEHigIbcQqqqo+inURjg3B98vHIaftGeP9cE8BjRsUR2C46jFRTwz3Pabj8DiwRlNTZnkHE+PT6fzty8XQiKBz25uLk5Q8XHG44I4ZP+M40+uQGQSvR7alo1QVdhGSYJttNJFoWm4f9/6FuzxrbeiiCTbBmtaZsUhkSBfmThkJJM4nlQKNsyKXrWu4x4xoZhKiWEr3PIsy5gw/73vYf/60IfMt8ubQL3borpAg0SsLc6Ii60oMOKKIjI9dhN0PAVsagqGyOMB2ZVK4ecNG2CIvF78HzuVbOQ0TZTjO51wiqwEldUAl9zzUBK/vzxykzf73l6Radc0GPypKYh/d3ZiA2GiRNdB9LG+UioFQ93Vhd8dPIhretVV1o4lncaG5XSi0sis8Z+fh9B4WxuyTrqO7NnCAtFrXoOMuJVjmJpC0DQxIZyIpiZco927y28hKoJ636DOCFtULswSiY1BK8Vx6BDszeWXQ7/GCEUBUeh2g1xgO5dOozVG18sT+E8k0MIciRDdfbcIeDkLzlXXZu3qygpsWCCA47S7VTUeJ/r4x9FiefvtGBLl9SJgsmk/atiiGoKJRE3DPaxFixxLuGha7rA4Dt7d7sxEmaoiceh0Yo+16meYndisaUQPP4z995JLQLiV0hqMxVBt43BYD5aJsM4XFuAL8uCZcBjJ5q4uBLxWCT9uv25qgl8WjxP9+7/jve+4w1o1k6aBMIxG4X8YK0iXl6EnGYkgsbJvX/EE/OHDsJfbt4tqT7NgQpG7hYhwflylyPdI10HAHj6M8+7uxmeVMT23YYvqCFzFtrAgyL9160oXhLDGXWsrbFEiIYZhFmpfLoZwGO/Z2ZlrC7jbLRYTsZPbjWeYdffjccQ/7PdXIqd1uqDr0PZfWoLNKFZFXej1//VfsKtXXCGkLDo68F5MsCoK/t/pFF16+Wx1IgECUVVhC8zowBZDIiEIRe6AO3gQEg07d0J2i/1xTnqc5br1dYEGiVhb1PXF5iynJAljXQ1B3KkpIaTd0gJHi9tQk0kEfJKE6pP2dqEBEYuBlDJuYJGIqEocGqoKmWQZXIbP2hBMJprZ9OJxEGU+HzYHJiVlGRvEz36G73fcAQcwEMAmf+SI2Ax8PryGibzHH8d1vO02ay0FqgpiUlGwYZl9FmZncY87OlAxoOvIRk1OYtqjGf2OYsc0NobM/eSkGAIzOIjKhA0bbHtm632DqmtbZAfMEIkNf
cTCWFiACPbAANG99+YSrSdPwl5t2yaCEk1D+6MkISi2Sh7IMoaoLC4Svf3tICwYHNgYg+NSWF4Wcgpbt9pPFs/NEd1/P+zdfffBufd6zU22tICGLaoxNA3PDlft1KJdLp3G3un1wh8yPj+yjOPx+TKDz3gc/lJHh3VhfSLzRKIsI4BdXCS6+WZzWozxOAJeXQeRaGZgihGShPVFBF/n0CHY6EsuKS8RYGxvdruJHnkE5Ozb3madTDtxAvZx27b8us2ahorJl1+Gzdm7N3/l5LFjOK9zzsnVnbSKZFJUKKZS+L9AQFQoer04rtFRkBnJJGz77t2WKpAatqhOsLqKGECWhY682f2YK/7ZP+LBJdnV0FawsIDnkuVQuNqQp5J7PII49PvxuUwsOhw4h2JaoGsdJ08iVt2yBevOCnSd6Kc/heTB9dejGCSZxH4RConW9O5uXDuPp/h9isdBIOo6khxWbXMpRKNEX/wi0VNPwb7ceKPQcnW5hC6vzyckGcq4r3X6JNQXzmoS0eFw/CsR3UNEHl3XlRp8ZN1e7HRaCNb6/fYaa86GTU6Kicy9vXB68zkviiJ0EltbRQsKZ6m6uzMNZCqFLHkkgg1qcHBtbDSs+SjLYhP0+3OPLZ3GubGGmKKIaY0ulxh68vTTqPy57TbhkHJGPB6HDo9RI0xVQTpOT8OQu924jn19QmOyEHQdBGQ8jkDerPMxPY0WIZ7yRkT0m98g0Ln8chALlSAeh2PkcuEarayAVBwbw+94MjPrKJaaaFgEtj5BDVt0emCFSGzoIwokkyAQUykMUsnOUi8uYq1nJ25OnsSa3LLF+pR1RSH67ndh+9/2tkzdHLajTU3mK8NCISGnsHWr/STx0aNEH/sYjvujH0XCpAoEIlHDFp0WnA4iMZHA2mppya1USSRADLBgPWN+HgH6hg3W2tEYPLGZ/Y1scBVfOo2E5Ooq0RveYK4lVpKQ8FNVBKtWq13SadgZbo+++urKA15JAoF45AgI0V27rL1+chK+0YYNpTsqolFUT4ZCIBsvuEAc/6lTqFhcvx6ak3bajFRKEIrJJP6PA3bWNT9xAiRmOg1/adcuU35ewxatccRiIN8lCfFGX195VWaShP2TCKRXpZ1ekoRnPpEQnQQ+nyAOeV/nNlmWh2LysJ6TvDMzkCsYHISfYBWPPII46vLLYQNTKTFEUFVR/cfar93dxX2QaFQkZPbtqyhGyotIBEP4RkcRq15zDfYM1tIkEq3V3GbNQ6Mszlwo62k8DTanrlGjGXOnFw6H4wIiuoWI/lXX9bHTfDh1BTbYySQWb1ubfTpAiiJalhMJGI1Nm7AhFXPI3W44aDMzMHgOB4yLwwEjxCQkw+tFwDk5CYc6mYRTdLqGrTDYMAYCOP94XBBdxqpP1iaanYVR3bYN9yEYFBvrqVNw+i66KDOjPTKCa7R9e+6QgaefhuG+8UY4quEwrs/UFBzzri44GPmc8tlZXOvBQfME4sSEmN68cSPO88ABHPtFF1VOIIbDIKF9PtHytW4dvi67TJAG4+P43AMH8JzwpOdKy/XNoGGL1hbYkeJcWj7HignGBpEI8ICQlRVUIGavG0mCbW5ryyQQp6fxmqEh6wSiquIzJyaI3vKWTAIxmYSD7PebJxDn57EftLXBabc7APnVr4g+/WnYur/4C1SC+XzmdaJqgYYtqgxOJ4i8lRV8tbdXTxeaEQxivUUiQkrG+DtFERNUeV/u7cVePTUFX8Nq+7XTWbgimwlEnuB6000Y6vbQQ/g5XxWeEYEAiLMXXsDX3r3WbAMnj5NJ+IRcYVcJJieR7Ni9G9cukTAfSC8uwkb19JiTZGlpQffF2Bh0CR95BK19Ph8IxP5+VFbabTN46EVXl5hUHYvBRwqFcF27uiAtMz4OP3JyEraSj89ONGxR9cGVu7EY7i/vw+U8W/E4bJ7PJ/bdct4nmRStytxuTwQbMzycGaNxTMSDKv3+/Bqf9YaVFdibzs7yurB+9Svo0u7bB1uRTOK6ccWh0wnbLElIJs3NIU7q6clNKq2uwg653Xg/uzUlZ2aIPvtZ7Bnvex90YYlgZ3t78QwwoRiN4p6rKo5ndVVUK7a2Vm6DGjbHHpwVJCIRXUBEf0VE+4lo7LQeSR1BlsXUq0DAfMttKfDE5dlZGIjWVgwA6e01H8w5ndgEFxaEFkdXF5zapSUcs/H9HA7Ryjo7C6M9OGhteqfd0HVcYyYLIxEYSkXBcbe1iXbt1VWc7/r1uaRePA5Nop4eoiuvFP8/MQGHcOPG3HackREh1M3Obns7vpJJBNmLi3h9UxOIuM5OHNfSEjY+zmiZwfg47lVfH+4DETTCjh1D4LB7d3nXkAjXMRTC9Wtqym31YnR34+uii7BJ8aTnp57CV0cHHJfhYet6JBbQsEVrDGaIRKdTtFjUu9NaKR57DG14N92U24LHU+Pd7swgOhSC3e3pKT4wKR90nehHP4K9uvHGzMqgVAq20+s17/DOzoLQ7OiA025ngK7rRN/5DtFXvwqbdv/9uBZ+f3lVYFVGwxZVCCORyIMpqi3m39EhdKC93kxSsLUVdioSwXPn8+H5Xr8eScbJyfKeebZ5rBHrdAryiQeScKXijTeCSPz5z6GTWmpYgt+PYPKFF1BRuGeP+f13fh6B6Xnn4XOWl3FcVie9MpaXiX75S/har3+9SO6yXlwx/zQahY3iafNm4XAged7fj2vwm9/Ad7v4YlQWVbvCyuPB9e7sFHrn0Sieaa70uuACPDsnTqCCaNs22wdQNWxRlSDLWCerq7AJAwO41+WsD+4ai8dFsj6ZxB7MnUylXm8kDlkqIRDA+m1qwu9XVvCe/H48/IhbXJub7dcuPh2Ix9GxEAyCnDdzT1hiIpUi+u1vYWe5JdjnwzXL9z6BAGLBWAyx3cwMYrneXlzPlRVoonq9sKd2JwqOHCH60pfw/n/yJ/nlGzweEaepqiAUIxGhu8l6mJ2diD8r6Ips2BwbcAYswwbsBmtNpFJYsKVEss1A12GwpqZgCJxO0bJcSQVYby+M0twcjA5rSaysiApFo8hub68gEicnQa6V63BaBQe8xi8mLtxuGENuxeZSdLcbDqws43fZBKKuYxNRFLQQsbO/uIhr3dsLstSIpSVUIfb3505TJYJTv3Ej7k0oBAfk5Ek4tsEgNnmuUiwFXQepEArh81iX6dAhbFg7dsBBLRc8QCeRwIZiVruICdN9+/CsM6H4/PP4amkRhGJPz9qpHmqgOjBDJBq1Wuq5daYSnDyJAUh79yLAzcbUFGzVli1iz4hGsb5aW0UCwQoefBCO9vXXZ9oKRcG6d7vNVwpNT8P2d3YicLdzXafTRP/wD6jCuv56ZNrTaew/tahybuD0wOFAABwOI+hh4qWa6O7Gc7y4mDuhuL1dJPq6ukRr2OAgfB5O5lmFyyUqQ5hsMhKIjKYmQSQ++CDRLbeUTtb6fKIi8aWXkCgopV8djyOZ0d4u5Ag8Hpx7Op07gKYUUinoinm98KXY92XSY2Uld3ozI5nEsXi9IAPK2R/8flT6HT4sdKuPHEGC
vVaEidstfCNVxTWORvG9pwf+59QUWrBHRnBs1ajkbqBypNNY6xwL9fVhTZV7r9JpsbZaW0VSjCugWZM++/25gpA1Do0Tw7myzPia5masxdVV+FqKgtd4PHj/WgyyqgVSKax1lwskYDFbpeuZE5V1HbbhkUfw2ne8w7yN4PbwSATPx+QkjmVhQcREdl/jX/8aUjQDA0Qf/KC5JJHLJZIbmiYSG7zPzs3BZnZ0wNa3t58ZxHK94YzXRHQ4HB8nsM3ZeDcRXUPofV9HRH9PRDcTkYuIHiSi9+m6vpz1XpuI6BNE9Foi6iCicSL6KhH9na7rmonDWfMXO5mEoSfC5lBpVl1RkPGYnoZT5PPBmR0YsNdQxeP4DKcT78+OD2soBAIiK6+qOJblZfyN3w9yy872Zv4M4xdPxeLMm/Er37VgsfTxcZzH9u25RvKZZ6BDdP31opovEsEGwxWe2QLsDz6InzlzZQarqziOo0fxml27YLiLBQe6jlbl5WVxz4mg23jwIJzPK68sP5BXFARS6TScIzsCdUkCWTo2hudW07AOWEORtSipDL2Nhi1a+yilkXg2D1oJh6Fl09wMgizbZoXDWDd9fUIPzRhc79hhvYrzkUdgK668Eto5DBZ2dzjMazGxnEV3t5BTsAuRCNqWDx0ieve7id70Jpx7MFiTaveGLVoD0HWhPcVi8NUEVxj5/ZnyLUTwP5aW8LOROJiaQiB2zjnlH18igeedq2sLraOVFRCJXi+IRDPEqqKgGjEaFd0phf7u6afx/ZJLMv2YWAzXxe2GHTLjZ+o6/KLRUaJbb80dbGCc3swabMbfHToEP2Tv3vJ95uVlBNvBINGrXoWqv1OnRCVgOcSvXdA0+MpcQba0BBIxHkeQf+GF/52Uadii0wxVFV1EREgk9PRURrBw+zJXXmfLNmgaYgROKrAMFn9x4pWJw2CwuP8ky3j2ZVkQRLXQnK0VNA12Lh4vPFSKicNUSrR6OxywZ6dOEf3gB/Bj7rqr/Hur61jHTz0lEjmDg/Zda02DDM0vfoEK8/e8p3LJDx6AFQ6D+OQ9l7vQDN16BW3RGrM5dY2zgUTcS0QfIqJ7iehTRPTy7371JBH9JeFheZaIThHRr4hoOxF9gIj+Tdf1uw3vs4WIDhBRnPCALBAett8joi/ruv4HJg5nzV5sVRXTLb1eMSWpXMTjcFi5QrC9HURdNau6ZBmfqSj4nHQa5+Dz4WenU5CJbJxlWUyvGhgoL+DjEn0mCxOJTG0en0+QhcGgIDNLgacfy7KoljTem7k5ou99D2TcTTfhNckkHFqPB0bbuLnoOrQz5ueJXvc6axMH02lsXOk0zoEF5XmqW1dXplOgaahaCodRfcQO8MmTaNfZsAGkQLlETDKJ89d1fH41Wsh4IM/ICL7zAJxAgOj3f78sZ3nN2CJJWru26HSDq4ALEYmahufubNJHVBS06C4twRHMrhJKpTBt1OdDFaLDgde88grsxI4d1h3TJ59E6/SFFxLdcIP4f3YiNc280PbEBAKr3l5zWmVWMDUFAnFhgeiP/xiOeDIpAqZqQZKIPvMZok98or5t0eHDZ5YtikSwHoLB6lckxuNCKyo7icb6Ui6X0D7TNKwFTUMAatXHS6VEkrm5uXTidXkZazgYhOC/GRugqrAlsZjoBDBC1/H7cBhVf/mSh7KM9a7reH2pwPXwYQT1F14IW5UPXFHF05ubm3FNT57EsW7eXH7CIBoFKep2gxTl411ZQRImHgeZsm3b6a/E0nXh505P4/zjcVQE3X9/fdsiWsMxWilomtC0VFXcD+7UKhe6jjWcSOCZ7OwsbDPYJ+diCV3H33Llm7EjrBB4gGcqhdfHYnhtIYmiesXRo7hPu3ZlxmGaJmJTvo5cYe31wj6MjhJ9+9u4JvfcUxnhNz8PH62pCfE5V3+2tcFuVlJYI8tE//IvsKvXXYdheNVIvMfjuJacIOPk7Y03FiUR15LNqWuc8cWfuq4fcjgcBwkPyy90Xd/Pv3MIq/S4rut/lPX/H3A4HPfpuh753X9/loiiRHS+ruurv/u/Lzkcjmki+ojD4fj/dF3nB7FuYHQIuP23XKPE2nS8mJ1OGLqhodpoD/p8cDqnphDQ8eARrqhkfZtkEv9mgm3DBjHgpaOj9IaVSgkdQxb1NrYls74HE4flGs6ZGTgDmzeL9hZJEtf2pz+F8b/+evy9omBzcjjgXGdnp158EZV7l11mjUA0Bh5bt4rrHArhup06hd/39uLaud3IokciCFS4mmBiAlWT/f0IKMq9LrEY7i9XGpS70SlKZqYv33dVxfF3duLaHT2KTbwcNGxRfcDpFERhIX1ETTu7Bq08+CDs0dvfnksg8qR2XRcVfpxESKcR+FrdU555BuTD7t3CvjG4JcoMgajrSAAsLYm9yE4cOkT0yU/iOfg//wcEZTIp2rSqhVOnQFxOThJ94hPWX9+wRdVDaytIIZ4eWs3noKlJBN5ebyZZ5vHgWFZXsRdzq+G6daIqN7virhjY7/F44FNxW3OxKpjOTqIrriB64gns/a9+den92uWCzRgZwV6raZlVeDMz8IGGhwt3H/h8eE0oBDKxo6MwoT83h3U8PFyYQCQSLZhut6jGXFrCvd6woXwfN5HAEBWnE3rNxnvY0YHJzGOcxbehAAAgAElEQVRjQhZm+3Zzk6+rBYdDEOTd3Tie0VGQEeWgYYsqAxN9CwtYj62tePYrrfhKp/G8KQpsR761xm3MsZiIhbiIo6vLfHKfB0Ilk1gHLEPV3o71u7JSVZ3ymmJ0FNd182ZcI00TMQcThy6XGFhj9HGmptAW3NlJdPfdlRGIs7PQpG9vR8GJy4X7trSE5ykSgf3p6rJe6biyQvS5z+F43/52omuvLf84C4FnCigKrtX69XhO5+fxVfy1DZtjF854EtEkPp/1718T0R8S0UYiesnhcHQQ0euJ6B+IyONwOIxh1M+I6CNEdC0JNrsuwM6nqsIYZWtTWHkfbllOJoW2y8BA7Scgu1xw6GZnhRgrkWh3ZV3CWAx/y9VGGzZgs1pexu8HB2HAFSW3LdkoLh4IiM0yELDvfBcXsamuWyecgWAQP0sSCMT5eWR3vF5sRK+8AqO6e3euAzE1hWz7li3WRL91HQGHLIvBNHzuPFErEhEC55OTCFr8fmxMTDjMzhLt349/X3tt+VWuKyu4R34/rk2+98luAyhEEOYrwvZ48MXizV4v/s2kxbnn5mpM2oya2KJqi/+fCeCKxEI2ke3AmT5o5ZlnEGRff72YpmcEV5tv3Sp0kk6dgu3cubP0YIVsHDqEtr7du4luuy3z+rMGopnpfCynEI9jP7I78H7oIaK//3vsc5/+NOwzV41XqwJN19Ei+nd/hzX8j/9Ync/5HWpiiyoZqrWWwf4HT1SuFnRdVAD19+cGfPG4GDrGx7FxI17DQxZKgWVuPB7sg5woUFUxUKUYtmxBS9v0NAaWmAlK9+yBLEsoJPRUmSy57DJzz42m4TwTCREQGxGNEh04gIqgt73NvP+mqvC3wmEQfzt3mntdNiQJ01XXr8c
k5ELPyb59uIfPPotrwFOtq13pahYXXZTfn7IRZ2WMVgrhMHzvVArre+NGe56JWAzvzX6+ca/luDEWg10ggp/c2Sn0DNNpc2uJdTclSZD0xkEZLJmwupo7jb4eMTeHGIkLE1gigUjIbnk8+e3p/DwqEJubid75zsp8+KkpJGk6O2FH2cdyuXBsHR2wu8vLeA46O3O7zQphYgIEoiQRfehD9u3v6TSeN1nGd2MMx0PEhoZE0Y0NaNgcE2iQiMB41r9Xfved3authP76P/7dVz4UUG9Ze2C9imQSRqO1tbyS91hMZLQ1DYZn61YQRaezOsfhEJqLCwsgtxwOUdrv9cIQSRIMExOD3Go9OgpDaKzKdDjwc2uraEv2eqtznqw5wwLXRjiduOZTU3Ck29th6Ken4RBv356bEY9GUQnQ2YmJf1YwN4fjGRgovIG3tuIrkYBDHgohE8qagrqOiYdtbSAiyiFadR3EajgsCL5wOJcc5K9scEuAxwMnpb1dEIT8vdBUs2gU1yEYtF/LMw/OKlu0lpFdcWj192cCpqdBWm3enFsRSIR9ZG4OtoXJwpkZoYNqlUB85RV83qZN0CYzXld2IP3+0k4iV0KuriJAt1NPTNeJvvY1OPQXXED08Y+DwJFlsT9UA5JE9Ld/S/Szn4FU+Ou/tj7p2iIatqgCsFZnPI5nplrTuR0O+C48aGXdusx9rKlJVPrwEKKeHuzrs7P4fbH1xJ0qHMTzezud5jViWb7k0UfhC9xwQ2mb6XQiAD16FGtZkuBr8iRTM3A6QayGQkhAplKwBU4nfL4HH8Tx33STNb8kHMb79ffD3w2HcwfMlIIso9palnFtShHNra34u1OnkBB++GGQnywfcbpR5WNo2CID2Cfloo3hYXu6vTQNz3V2+7IsC+KQpZr8fhBLnGxneL3Yd2OxwmtC0/AZXK3NOqP5/ra1FZ8fDudOo68nhEJYt8EgYtFkUthjj6e47VheJvrmN/F399xT2b2emIAN6e5GUUS+z/V4YNu6uoS+5soKXtPRUXitv/gi0Ze/jGfi/vvL7/xQVUEW8ndjUt/nw37KvmCVBqo0bI4JNEhEQC3w/46s718koh8U+Nsymxxri1QKxl3TBBlmZfNnMmdqCkbd5RJTd6stJG4V3d0whjwow+USlXG8Ea2sgGiMRGCcmpthlOJxnF9fHzboStqSrSCVQhDu9+cPfFdX4YgPDcGhZH2gqakMQdn/hqKgqsfhQAuxlaopLmvv6ipNBqTTIF/b2lCtpKpw+F98kei3v0XQ8sY3Fg5WdL1we7EkwWGKRrF5trYK0WgibCBMBvKGnE0QlrvJLC3hKxAAgViDqrOzxhbVA4oRhRw4F2t9rmckEmidaW4m+r3fyz1/VYVd5QwwEdbKzAzsrNXKv1OniH74Q5CPb3tb5lpjO+D1libpNA1ZdpZTyNZUqwSyDCJv/34QD3/4h7BL6bRwaquBkyeJPvpR7GV330303vfWpMq/YYsqBFftxWKwEdlJQbvgdmPNLSxgDWZLDrS1Yb2yRqLPB3/hxAk8U4WIKJZt4cRdNniNGrszCmHzZuzpjz+O9fOa15S2mQ6HGA736KM4jze/2do+zCSrxwO/YXoatumxx3C9brrJ2n2JxeBztbaC5EynxeTQlhZzJEc6DW3oeJzoqqvMJ1scDlFV/fzzqNqenISWY7VI6jWChi0irMe5OTw3Xi/WsF02JZWC7eD2Za8Xz3QsJhLzgYCY0l3Ip2ZZrEhEDLdksK4oJ1b8frxXqfXc1YV4ggsU6qX7g4saVlcRC3m9sGc8TNOMzxiJEH3jG/Br3vWuyu736CjkXfr6IN1Q6vO9XvhjnZ2I++fnERP29GTaG13HELzvfx8+1wc+YN4ecVuykTDktm4isfcwYVhDErlhc0zgbCERKy20P8Xvoev6I5UfTu2haSKLxO1gVogVJremp7HQAwE4ngMDa3uselsbnK6jR+F0cUuuJInMhscjnOymJjjge/YIAml+HkFytY2XpuH6Ohww3NkGXtNQheJwoCXI6RQZok2bcC9Ym4QHuBw8CDL0uuuskbyc6WxpKV3Fk0pBWyOVgpYRZ9R9PrRCtrRgGuRzz+EYeLpbdgVhdjsMnz+3VWzbBmc7u3qwGuQut4hFo0JjxiaS6Ky3RfWGUkQit/adSfqImoahTdEoBqnka5GanISzt3Urzj0WA6nY0gJH0gqmpuCAdneDsDQSZNzyxFn7YlBVECOxGGyiFe3XUlhZIfrYx1At+d73Et1+O4KDahKIxvblYBC6i69+dcMW1RO4eo+JJh5yYjdYQ4yr9bOrVbhFjStK3G74NWNjqEjM1kfkbpVCBCLD5cJzqqqFh1Exdu7Env/UU1jjV11V+rz4PVnbemoKNsfqNWxvx2fOz6OF+KWXiC6/HL6JWcgyfEmvF+fCVTFuN4L91dXc6c3ZUFUQqeEwpjCXk+TgCc5TU0QvvIDqzm3bcEz1QrBkoWGLikCW4Y9GInjWWIbALjsSi4lKXa9XDGfhdceaomafLZcLr4vHhaQDy0hpmrApZmNHpxM2a34eMVmhqe2nG1wMwXEN//v4cdiEiy6y1qkQj4NAlCQQiJUkREdGYC94QJOVZycQQOwcj4vuvqUlHE8wSPRv/4ZilQsuILr33uKxMg80ZdJQlsXv3G5BFrImZBWLdxo2xwasYfrHVsR+991icxWg6/qiw+F4hIju+Z1Q5jHj7x0ORysRybquy/nf4fSC9WyIYMisGLFIRAwq0TRsXNu3IzhbqwGzpuXqGLpcoupw504YUtYx5Oo4zpBx5ognis3OIoOzbl11s71zczCo69fnrzI5cAB/c9NNQrx9ZAQ/b98OY8vi59EoMl8jI9i4rFQFJZO454EAgoxi9zkeh26RJIE4YGdkdRWObSqFtmufDxms2VkhhLtuHZyBlpb81YOKIloljdqQ1YaqYpOUJDguNgs6n9W2qF5RjEg0/q5OA7gc/PKXIOPe8pb8LSlLSwiCBwaEFuDICNb55s3W9ob5eTihLS1E73hH5jrn5BfrJRV7X0XBMScSOAarrdTFMDqKSsBwGINMXvUq2DlVBTlhkwZPBuJxov/7f6G9uG8f0Z/9mXVytgQatqhG4Gc3EsEzVC0isa0Na3FlBfuo8bl0OLCXsdZVdzfWXFcX1nNLiyAemUD0+80lH91urD8etFLs3Pbtg1/wwgs4xksvLf7eMzP4uvJK/HtiQkx8t3oNm5rgWxw8iAB4zx7zr1VVEIiahgpEo4/GE7CZNEmn8w9+0jRMnQ+F4BdVqtM6NAQf6qWXkMidnkYgb2f1dY3QsEV5kE5jfzQOquzuto9YUVUkA5eW8GxyAUAwKIaDlftZPh/sQTgsutY8HkHmW4XHgz2ddfqqVdVtFUYNdkURXSncAXX0KM593z5rsXcySfStb+Fc3/lOa0Owso/vxAnYUNYMLHfvaWpCcjYSQWXiyAi6RyYniW6+Gf6i8b1Z5sVIGGa3JXd0CNKwxv5zw+bYgLOFRHyGwBh/1OFwtBORRES/tfge7yOM/37W4X
B8hYiOElE7Ee0iotuIaDcRjdl1wHZAVUUpusdjPpOkaSDbpqZgLFwuGLChobUj5MzgUmgmCxOJzMwGt7h2dcH4jYzA2PNmZgQbeDZyySQMYn+/aNNLJISmjp1YWcG17unJ77RPThI9/TQ0cLZuxbG98grOb8cOcTysGTI5CSedJ5KaFTpOp+Gk8xRHSSo8vTgWE9MTN27ENSLCvw8exL15wxvg5HLVoNsNgjMUwuvjcXF/jORBNIpNqtIJzFaRSsERZ5H6KkwVPytt0ZkAh0NogOUjElX1zNBHfPlltBtedBG+spFMYo20tGBtM3lHBNtkpTJ9eZnoO9+BE3nnnZm2T9dFG2hLS/Hrmk4j259MokLezmTP009D9zAQIPrMZ/D+RgKxGhXqx49j+vLEBNFdd6EKoQqi8g1bVEOwdMzqKp6fYtpSlaC7W+gj9vdn+nwuF4hEo1RJfz/2Ya7w44CP5W7MwuWCLVDV0jbg4oux1x46hLV/3nn5/y4SATnW2YmKQYcDn8N+B7c6m4UkIUEyMICK3rk5XK9ShISuw9+SJHxmvuvicGCNejyZ7c1M5Oo6KjDn5tB+vH69+eMuBq9XvN9zz6FNe3gYBGkd6cc1bJEBioL1yz51dzf2WjtIFk3DemctdW5fXrcOz69ViatCSKVgSxIJEUdVGj82NeF9o1ER250OaFquBrvTmdkhRQRfKhqFzbASS6TT8IsWFjDduNzkIdut+XlUElqpui4GHlL65S+jPfqNb4QNCodxr3kP4bZkhwPXhu2h31/7oat50LA5NuCsIBF1XT/pcDjuI4hf/gsRuYjo3WW8xwVE9OdEdCsRvZ8gtHmCiD5BRHO2HnSFYC0bIqEnUAqyLFqWUykY6G3bsLmslZbldFqcmyRlCq7ydKu2NlFlmL3pnnuu0AFKpbCx8YbpcIjpx/xeTKK1teEahMNierNd1SeShM2iuTl/+50kEf385wg6rrkGhvnll7FB7NyZe28kCc5qXx90hxRFTDfjID3f9GJZhi6ZJMFhicUy35c3Ap4etrqKv9uxQ2QXdR2VM+3tRK99bf5W6GAQ/x+LYXObn4djzY4MizsHAvh3rUiZREIM4Vm/vlrtiWefLTpTwHZC1/NrIJ4Jg1aWltBWPDiIzHI2NA1Oo9MJp1TXodcny6iGtmITV1cxnIQIRFk28RePg5AolfxKpUC6pVIgQeychPujHxF99rNIQH3qU7DPKytikJjdjrCuI7P/mc/AVn/qU0RXXFGdSseGLao9AgHYjXAYQXxHh/22wulEMnJuTmiIGcEJ1JUVHEdHB9byyAj8Cq4utjogyOHIrUgshiuuwJp9+mkc065dmb9Pp1Fh5/Wi8o/t7fAwzvHkSazDXbvMXUP2TRIJSBFwe2QohM8qNhDw1Clcqy1bShOOxvbmSATXsbkZk5UnJ4n27rUvmDeitxcDa44ehY87OwtyttzhBrVEwxYBmobncXFR7DF9fZXvM1xQwnJH0Si+t7TA121rsy+hwQUGqZQoQDG291b6Oe3teL/l5UzCrtpg4pArDolgd5gQy7Z3Y2O4j5s25WrUFoOqEv37v8NWvPWt8GnKPd6XXxbHYGcXw8mT8FFSKegftrbC5rC/1teHZ4tbk32+tde52LA59sChZwuRNVBNVP1iKwoMuKJg4ZopR19dFS3Luo5AaWjIXs2NcqCquW3JxswGE4X8ZTbrGg6DLFJVGL+hoVwRf65W9HrxmYkEvkuSKM1ft67yknpFwWbjdMLI5wuWf/xjBO533IHg4OhROKe7duUGzLIMR3lxEYNUAgGRuYtGxcaePSjG5YLzkkrBSc83vZg3yXgcQbvDAQKRyTZFwcTAUAgajIOD5q5BOo1nb24O31UVx7BjR+3I69VVfLbHg+M24ZissS3RMhqGv0wYScRs+2isVFxrTlMppFJEX/wibMsHPpDftk1Pw7accw5sz+goiEer+oOs9ROPo1Unm+jginLWdi0EWYYtYm1Gu6r1NI3oC18g+s//RNvhX/wFjmNlRQzIsDt4icUwtOXRRxH8f+Qj2BNM2MA6e9JycNbZIp426nZXh0gkwtoKhbBO87X2x+NY683N+JqYwNfwcGVVcpqGPdzpLF05pWkQ5B8fR4KUA2ZdRydFOIwKl3yJgelprP3OTpCMpT7r4EEQltdem0lYhkL4nGAwf9JyZgZ2bnAQ18YsdB3XWJJQDTQzg8/dvdv8e5SLcBikZTiMRPn551dvanwWGraonA/VQYotLGAva23Fs1hJ8ohjQSYOibBG0mk84x0dWDt22R7+PFnGe7J0lsMhhg/xhPdKwYMbHY7qdIYZP4fjQR4exW3ZXm9hmzM/jwrqdetQiGMWmoYk7tGjRG96E6QJyoGmQWZqaQmJj0oTCYoiqgsPHsTAvdZWTIrm59TjEd1lLO/A2runCfVui+oCDRKxtqjaxeapV5IkpgwXI9U0DYZuagrGndtGh4Zq5mxkQNdhpIyEobEt2efLJAz9/vKDdE2DceUNz+OB02y8XlyKbdRp4CpIWRYakd3d5VfL6TqyTckkgsV8DsOLL0IE/Mor0Z5y7Bic0fXrYcSzKwqPHAEped55yACyNgd/KQqO2+1GpqitDZ/LUx37+4trAPJkQrc7s/JIVdEmNDuLYMBq1ktV8dq5ORwjT77l61vNZ5I1opqacP4m72W9b1ANw18BihGJPK25nohEXYfzeugQHMN82e9IBBU5PT0IqGdnEcgPDFjT60kmib75TRByd96Z6+DKMuys31983SeTsEWqCkfdyuCoYpAkor/5G2iX3XYb0fvfL6qjiRB82e0Yv/wy0V/+Ja7pXXehUspCgFcnT1lBnJW2iIlEbjGuRiC8vAz/jgXws7G6iqDP6RTTi9Np6GZVsueytIPLVfq8VBWJz5kZouuvB1F38iT8mJ07i9uW2Vn4RG1tqPArFNSfOkX005+iE+W663J/H4nAB/J6M4cFLi9jbXZ1IalZDl54AYTepk3QUq1GVXE+6DqqS48cwT60e7doCa8iGrbIyofpWIPz8/Djm5rg75bbpptKCeKQ4ycuJmGSR9OwXuyS6uHBZ5IktIvztURza3MwaE+XD8dhgYC1Sr9SUFURUzFx6HYL4tBMUc6hQ7jGe/aYX2+6joKR557D4MzLLy//+A8fhr+ybZt1LUVNy52WrKo4vl//GnHeli3wi7q6cpOpiiISM6zD29V1Wrpz6t0W1QUaJGJtUZWLzRuHpgkh7EKGi/WsZmZgJJuaEMStW1dbUdNUKlPHMJkU03ndbkEW8oZj97FxOTxPTdQ0BMbGQFSScEx+f6YB5OEli4swlK2tcHytOocLCziG3l5RMWhsL15YIPp//w8b5FVX4fNmZ5F5Y+LSWC04N4cNaOdOiJVz9WA+koOvucMhqiuZsCuESAQl614vCEQmXTUNm8v4OJxkq+X3qRTOS1Vxbk1NcEp4EhsLPnPlp11OsK7jc2MxvG9Pj6X3rvcNqmH4K0QxItGYta4HHDhA9JOfoB3ummtyf59OI1j3eLC+w2EE5qw1axapF
LLYs7OorM5u6+NWKNbwLQRJAoGo67BFdiUZFhcxwOTkSaIPfYjozW/GMYXD+L3dBCKTt5//PGzchz+MBJDF9rKGLapTpFJiaEJnp/32QtcFQZFPW1jXsW/H42hpDgaxxzscYup6uWAisdSgFSKssQcfRPB58cVYhwMD8GVKYWEBlTstLRhekL0+w2Gi//gP7PG33174GicS8KFYv01R0E4dDIIMKOdanDgBEnFoCHZKVWGrSg2JshPxOPzChQXY6wsusFfyIQsNW2QS0Siet2RSSPeUU6XHgzO5hZgIMQtXGLNGJ1c+d3XZo5XJ2opc5cjPdbF1wsfIwxQrBZ9Xe3tlpKhxojLLY7ndIr4yu/Yliej55/G6884z7yuw1MKBA/C/XvOask7jv21WJAJ7Uyye489l7UomDFnjkUgM53K74ac8/TTIzXvuKX1uqRTsOM9V6OqqeXdjvduiukCDRKwtbL3YbMRlGYuUN4x8CIdR9RYK4d9dXahms3OCZSFwW7JRy5CDbNaUYA2eQKB2GhecsQsGQVbJcmaLsq6L7Fq+IDWZFOLE3I5szIjx1K58Q0lWVlAFGgzmvwcOB1qDFQW6GIqCVqO+PmTTWZeQEQ4L3cQbbjC36akqHMtTp7ABb9ki2g+ysboKh9jvx+bE90jXiZ54AhnvSy7BsVlBIiHaEvr7c4lYRcExciDk8+Ea9PRUFswrCoj0ZBLvVcY6qPcNqmH4bUAhIpHbmrmidi1jfJzoK1/Bur7zzvzr/+RJ7DXbtmHtHDuGgGH7dvNOIWv9jI0hmN++Pff30SiuV0tL4feNxwXRsX27fdqlJ05gArMkoSrw0kuFreb2LztJnkgEmoe/+Q2Ikw98QGgJWUTDFtUx+Bnjqg27iUSu8udhaWyPdB3PYCqFLyYYWBu5o8OeNjhdN0ckyjLRD36AKp6rryZ63evM285QCBV3wSCCd/ZP0mkEv/E40e/9Xum1xQnNRAJ+R1MTKhzLIV3GxhB0Dw4i8HY4RGup2w0ir5ZJpvFxXFtFgd00DuSzEQ1bVALxOMjDREL4s1YHgUmSiF9Y4sk4UZl9Y+66SiZFrFHpPeeut3gcPzN5aOZZZpvD1ZB2PH9LSzienh5rvoAxNmMaxCjfZJXsSqeRMFAUyAdYOZb9+9FxdtllqEIsh2hLp7G+YzHEYfkmtKfTudOS+dxdLqFhyN+dTrzfF76ApO0tt2CIipXjSyZBJnJyuLvbXg3OIqh3W1QXaJCItYVtF1uWxeRKJuDyVcNwy3Isho2FpyxXY2gEETaH7LZkzo4RiRY1/jrdgqvLyzCsnZ3Y2ONx/Nzbi9+rKs7H7c4kuDRNON/Ly3C6WV+or08QiNngc+VBKhs3ionKxg1s/360Mr/lLXDsDx/GRp1PRJyz+IpCdOON5qtyOFhwOnG+rGPU1JR5risrIBECATifRvLut79Fu8/556MKwAoiEWwuXi8IxGKkoK7jOObn8TqnE9elnNYPWQbxq2n43DLbIOt9g2oYfptQz0RiNEr0z/+MNfj+9+ffFxYWhIRCczPWu8uVf6hTIWgaCIJjx+CI7t2b+/toFD8Xm8Qci4Hsc7tBaNrVGvjEE0R//ddwbj/9aVRIplJIzlSDQDx8mOiv/grkxz33wG7zQLAy0LBFdQ4mEongf9jdLp9MYu8MBhFcapoI5nlwUSgEW9XdjX15YQFrvhLdZ13PrMou5uupKtHjjyOYHhoSvo9ZLC+jCsfvB5Ho86G65/hxVBSb1XlMpXAMkQiGv5jVdjZiehpVRb29kKLJ7mSJRPCzcXpzLSDL8CsnJ/HZF15o7RqbQMMWFUAyiRiD5aP6+sxPaDcSd7EY1orDkUkcZu9Psiw6edrbK9ci5KKKeBzv6fPhPa3aKlUV1Wl2VMRytbWqFu+oMxZ1GIe8GOWeKpHIeukl3Nu9e62d14EDKAA5/3zoIJZzDKkU1rUkIUbs6sL1yG5L5ipLHiLKQ0/8/vz3cX4eg+WWl4ne/W4UipQLTsxIEvzN3l77WuoLoN5tUV2gQSLWFhVfbJ6ylU6Llq9soylJomVZUfA3Q0PYtOzOfMpyJmFobEv2eHKHn6y1YFpVsdG6XNjQ2XkOBODMq6rYvInERsSOMUPXhbZeIIAgtLtbCM4ay+LHxmDMh4fzG+6TJ4keeABtJ5deiuySy4WWmnztSI89BqL4hhsE+VkKigICUddxrB4Pzisex+9cLjgm0ShExZuaELQbn5/nn8fGtWsXKmmsIBRCdSNParbyXHD1YiiE69jSAufBjEMWj4uqjAonbNf7BtUw/DaiEJG4lvURVZXoq1/FPvG+9+WfpJ5IgLRrbUW748svw07s3Gk+EaXrsGeHDqG6KNtW6LqQlGhpKbxHRaNCTmHbNntasrid+ItfRILkf/9v2H1Zhn3ifcGufUvTUI35pS9hf/jwh9E2yoOsysQae7Iso2GLCPvu8jJ+rsbk70gERCVXPfF6M1btLS3h3x0d2PeTSTyflaw1XRc6x8UIh8OHsa9v3QoyUdMwId5KlVY4DDvD2mW//S2qAC+6yPyxvvIKrgMPBWhttSZ1Mj+P4+/oIHr1q/OfM5O46bRoPa3l/jA3B/8tkYD/t3u3bc9bwxZlIZXCM8H6pz095jTiWHaIh1XwwLamJjwvwWDh94hEsH+53XiOK723yaQgL3k4SiXvyRJcrNdYKdJpXGOPBzEQryWO13iiMvtoHI/ZZWOPHcPn79hhPgYjgszAj36EysG3vrU8P0OWUQEZjcJ2+v34v+y2ZGOVoRnC9PhxJJidTnRJbN5s/djyIRpFnC3LiJV7euzTs85CvduiukCDRKwtKrrY3BJMhEWXHcQtL4NM4oxyTw/Iw0onCDN4OrGxNZkzG05npo5hIHBapzLlhablby+OxeA0sgbG6iquJWdLPB6xgcEWYSgAACAASURBVPPYeiMxaCQIYzGQc4mE0C40DoGZmhL6Q/mqTmIxom9/G47r7bdD60eWkd3K9/eHD2MDuegi88LfmgYiU5ZBZGa/Lw82mJ8H4dbTg03OuMEdOYJWna1boYNoFpoGkjYeR3BQiSCyomAzmp/HMfP94nuWjXAYn+3zgUCs8Pms9w2qYfhtRikica3pI/70pxgecscduZWBRDjuY8fwfft22LVoFASelQzyQw/BVlxzDapyssFJsWKByeoqJBP8fny+Hc6/oiDL/sADaJ/86EdhG3jghceDvdMuAnF1FQNbDhzAdfiDP8B1bG9v2KLTfQBrBaoK30PXq0Mkzs/jq7sb+2T2c5dMgmhkP25kBGui0oEcTCQWmtg8OYmgdfNm+CThMNaly4XKZStVVJEIxP8ffxyVhLfeav7YR0eRVDnnHHQpLC+LxLAZ3fBQCEndlhbYlFLkazwOX+t0tDcrCvy4kRGcHw/jqxANW/Q7sBQPa693dcGXLnaPeUhJLIbngv0GI3FYqpp3eVm0L1eqQccdb4oihjHakbwjErrs2Z1P5UKSsP64OpNjPSIxOMrrtT8unZhAPDU8jLjOLI4eJfre92Br3vEOa8eVSgk/
5fnnce7bt+P+uN2ZVYbclmwFTz6J4Xe9vUQf/GD+1uhKsbqK+I1nM/T22t4hWe+2qC7QIBFri7IutqIIQ85ZIDYKqoqsIpNTHg8IkoGByhYktyUbdQzZIDscMExGHcNatmTkg3GiVqHvrB1iBA8nkWW8R3c3DFoqhQ3J58PgAJ8vU0C41EbO+pM+H5zR5mbcn8VF0caQDV0n+q//AnH3jnfgvq6ugsDLl42fnSV69FG0ROcLzgthchKO9oYNhcmA+XlR9TM8LHQr3W78/xNP4P+vvtq8k6IoOCdZFroYdkDXsZnOz+N6scO2bh3upa6LITjNzeVP085CvW9QDcNfBRQiEtfaoJVDhzBs4IoriG66Kf/fTEwgINmyBUmWxUWseSvE//79COYvuwyTV7PB1evBYOE9ZGUFBGYgAALRjiAgFiP6xCeInnkGtvZ//k/cr2QSNoSrseyqDnrxRaKPfxw26Pd/n+jaa2FbbSIpG7boDAITAZqGZ9CuoF1V8fzNzYmBevnWUiyGZEFzM45hYgIBXr5KZSvQNCGZkq3n/Nxz2LP37hVrbmkJw54CAVQkmm31j8eJ/vVf4R9dfz0qn81UuszOws4MDGQOi4pGQQaxHFAx3fH9+2HHXvMa8/53KoXP0HVc82pJDRXC8jKu/+oqYofzzqvoGM56W6Rp2CtDIZEMKJTYJoJfbCQOifCs8WAUYxFCMbBGO9uNSqq70mk8k+m00NuvxnMZjeL8KyXQWV6K/fyODrwnVxtWq6BlYQGVy319uRrPxTAyggFzg4NEd99d3MazpJaxNVnT4DsdO4brdv758MsqHUKq6xjm+eCD6DZ573vLnxRu9vNWVrBWVFVUfdu059W7LaoLNEjE2sLSxWYdDEnK1apLJNCyPDsLI9zSIlqWrQYlup6/LZnh9Wa2JGdPK64m8g0nyUcQckWkEcbpWoW+s8HVdaEf0t2N85NlEG6aBucxGMS1YQHaUlhaEvenqQnGsqen8DTTp58GOXfDDbjOc3PIzOdz3uNxGPpAAEK8ZjdJbgNet66wFs7sLEjpjg5kybgyUddxTE89BX2h664z/xzIMl6raTifam1MkoRzXFzEZ3Hm1uNBVtbGjFq9b1ANw18lGHVnjG01a0UfcX4eQtkDA0T33pvf6VxZgRB/Xx+OeWoKyRAr+mAHDxI98ggc3HxEJdsVTkjlw9ISqoOam1H1bAcJOzuLqsOpKaKPfIToDW/A/0sSkitM7tlBIGoaKsu/8hVcvz/9U1xDvx8Os00kZcMWnWHQNBACqopnsdIkLeuQEcFnWFjAc87rOxvhMNZDezvW4MoKfIFK2854YrPLJXysp57Cvy+5JNePmZsj+tnPkHB84xtLB5eaRvTDH+L8broJ/puugxgrVs24soLKoM5OdHRkXxNJwrEQwXfKJjSjUegoulwgEK36N6e7vVnTUAnKerd79hT2U0vgrLVFHEMsLOA5b2vD+sq3dtNpMRiFYy0uEMnXZVYKq6t4fjwe+PXlVjBzwYosY302N5et02sK/NwTWR+2oaoiBjQmacNh3Iv+fvsSMPkQiSAZ29JibXr7xASq/Lq6oDNovNeaJgaeMGHIxS/chu3z4XyPHcN93ru3cr1LIlzLr30NidWrrsKQvVolvXkAEJPg7e2VD8+k+rdFdYEGiVhbmL7YvMmoKowMO2/csry0BKPS2wvy0EpVVzqd2ZZsFFx1uTLbkgsJrtoBzh7lIwWNP2cjW9OiEEFo1QlTFFxXr1dUCioKrncyKSZnyrL4nFKQJJS6nzyJe7RzJ14bDGZu9HNzqAzauhUO79gYgs2NG3PfU1XRIhiNQpDfbGvhygpadTo7scHmA2tpdnXBiTSSICMjRL/4BV7/+tcjCDazccbjIC5crupv7AyeSPnSS1hHvb2oqCqWEbaIet+gGoa/ijDKPDCYSDyd+ojJJNHnPw/bet99+W1HKgUH1e+HHTh1Cmv+nHPMf87zz6Nd+txzMSAh+3x5f2Nd33xYXASR2dICu2gH+XrkCNGf/znswyc+AYKTCPtgNAoH3a7JgSsrRJ/8JJJD112HCkSPR7Sm2YiGLToDoWl4hhSlMiJRURDwOhyi4ieRwPpqacHazoaui6Fz7e0IfDXNHiKftclcLtiJaBRyLIXWxNQU/J2eHvg7xfzRxx5D1e/rXoeqZUkSE1P37cs/8CAeFwNZ9uwpfH7ptNAZNw4FSCRAIKoqCMRKhgVwezMPnai1JFAshqrExUVc7wsusGyrzjpbZOyEYVmOfESzLIuKQ1nG//Fwkubm8vxi1nSXZewr5VbPs9Z+MonXNzWVbp22C2yfPJ7Sa0dRRGzIPpbbLeIxp1MMFHU4yiuqMYNkErbL7c6cBl8Ks7NEX/86zvPd78YxG6sMjUNIPZ7MSck8hDQaBXnpdMKm2VGQEYkQ/dM/Ie68/XYUspwOH1VRUOQSDuPfnZ3wQcvcc+rdFtUFGiRibVHyYv//7H1neFzVtfbaMyPNaNSbLVu2ZVvu3bRAbBPTgwkBAgmYEtOTkEBIuyU3AcLN5XJJuXwJEBJuEnAIgQApNBOCwbTQccNNbpJsNUuakabXc74fr1b20eic6SNp7HmfZx7ZU07ZZ++113pXU1VsNIGADCMXAsKnowMKRnExCKbGxsQbD4dCa9OStZ4NTlNl4jBbBE8kkji9OLY5CRHuOZYMjCUIc6lY+XwQqBUVUjgrCsbf7cYmXVWF+7NaE1+LqsII7+iAcco1sIjwW7tdRqsQISrm4EEITqPw+HfeAaH3qU8l33HQ64VBXlqKNGa9DaK9HZtvfT3IS+13enqIXnoJ47JqlYyq4nljtOEMDEDJ4bTu0fJsBQIYc+5e7nbjWoTAxtTQkLEhn+8bVEHw5xh6ROJYNlpRVaLf/x4E4fXX6zso2FkQCMA5dfCg7MqerDK+YweigWbNQrHw2DUfjWI9co1ZvXHo6UEUUWUlorGzYQi88grR3XdDvt19t5SdbNhlk0D86COkL3s8qCm0apWMTslBWlhBFh2lYCKRybxU544egchwOvFZba3+XsjRIRzRz4S+ntxIFZEI5FBXF4i7RKnSBw+izuHkySAI9fSIlhaQjUuXoqEJIxAAkRgKIWpHWyM8FIJBrqr4XTL6dHc39OiaGozLq6+CBFi9Ojv1x8c6vVlVQSZs3477XbAgJSfOMSWLXC5ZooebBGrXEjck4bq/RNhPOeIwE4d2IID1ySnT6UQJK4okrrnbc7yGLbkCE6xsT2ihtSVZp9J2VNa71lBI1j/Pdj2/SETKk+XLk4/U7O4m+tWvMM6f/zyuTasjahufWK36Mm5wEOvSYoG8ykaUaEcH0c9/Dplz/fXSsTqWCIfhyODmdrW1aTW4y3dZlBcokIiji7iDzYKUlTZFwQLv7pb1AqZOhVDUW0yqio1Fm5bMHi8iCKbYtORUDSZVjR81yH/1plW8qEFtc5KxhtOJ+6itHU4ScpHk0lLpvU/Ucbq7G+TVlClyY+OOaUR4xps2QUE//3xZGHjRIv3j7tsHEnHRInjAkkEwCCXcYkF
0od7m1Noq6zXGFgfu7yd68UVc17nnYt5w1+pgUCof2vnE3apdLihM2o5puYbHA+PEYhlOtAcCMtU5GsVzbGjAs0xj3uX7BlUQ/KMAPSJxrOojvvYaHAHnnYdaiHro6sIamTQJsstkQvR0so6bfftQLHzKFKK1a0caSYoCZZUIhITeumOHGZdTyFRuqCqcNL/5DUiEO++Ukfvc+dJmy06NVkVBPbaHH8YY3HabLBtRWZmzKOyCLDqKwXWjQiHMoWQNR65rZjIZZw1wU7KGBv25ydkZJhOuo6cHe6pe9GIq6O5GxODUqSCpkkFLC2TY9OkjS6n090PuTJiAyOfYew2FYPj7/SAta2oghz/+WL6XLAnDNZb7+kBAFhWBQDQqD5MOxjq9mUiSr+wAP+64pJ77MSGLvF7MYS7H0dCANaaqmE/smOKu5EwclpVlZ9/PNH2Zg1W4TBFf31jaX14vZBHPda0tyZ3dU8ky83ggNysqslt/fft2jP+SJcbH5bRkjjI8coToD3/A7z//ecwXLWGYzPNzOiGvrFYQiNnoQ7BjB9GDD+JYN9+cHQdRNsFj5/FIuzmFUjP5LovyAgUSMUkIIe4gotuJaLaqqvvSPIzuYCsKFkkohA2G68c5HBDqEyZA2YoN9Q6FhhOGfr8k7yyW4YRhSUnizStecxL+d7zmJPHSiy2WsUvhSxWKAgXRZMIGrb1uTlvgzbu42JiM5SLm3JWNCM+oowPjWV+PVJ0XXpA1e7hLnt6m0t8PEmDCBBTmT2Y8IxEQiIoCgzz2uKqKz/v7QRxMmTLyHjZswO/OPXekoh2JQBEJhTBedjvGhL31VVXZVa4TweHAs7PZYOzozfloFN/p6cE1FhXJrs4pGPljNptzKYsKyD6MiMTRrI+4bx+IrcWLib7wBX3Z4fHge5WVck3Pm5c8adHWBkW5vp7oyitHKrmqKkt0lJfrr02u81tbC7Ig0z0jHCb68Y8hN886i+g735Ey0O3GfZaU6Kc5por+fhCUH36IaKmvfQ3nN5lAiOaQNC7IoqMcnDIZDA7PkjBCKIS1Fo9AJJJlP4TA/m8U1eNwYN0MDmLNzJqVfoScx4O6W2VlMMRNpuSdFB9/jO7mc+Yg2pDJhscfx1q77DJjMjAcBinm9cIJ29eHNTt/fuqkaCSCcg2HDhGtWIH7yMX69vlwvWOV3kyEFO7NmzH3mpuJFi6Mex1HtSzimttuN9bDxInYL/1+6ZDivZ3LVpSWZm+fzzR9WUtyKookqMe64RsHp/T34y/rB5najw4H7rWuLjtRey0tsG3mzpXR03zt2rRkbRmuUIjo6afx3nXXwZZP9V76+0H4lZQkFzGdDDZtgr7W2AgCUa/Z53iBzwcy0e/HvdfXJ6Wz5QnjkByyJN+yjjHYksYGQohriahCVdV7x/patOC6hJEIFLT+frxntYL0mTwZi4ZrVvD3/X4Z0cKh0DU1spahlixSVRyfhZsRUZioOQmH38cShGO9AWUbJhMUA6cTyoJWWFVV4b45QpSbsMQazBz1Vlo6PJy+pATPtasLdRL//nf8v7oaz3fmTNkRTat4BIOo92OzoRNzMpuQqkLJDYcRgahHIO7fj/vkjt5aeDwwvk0morPP1lfOLRaMTziMzZo7bXGX8EzqA6UCVcUmMziIczY0GI+R2QwFYOJEfL+nR9aCrK7Gb3N53eNVFhWQG5hMkK1cD9HovVxhYAD1VjlKR29dRKMgAYuLpUI8e3byindnJ85RVYUIRD0vOe9zRkbLoUNYi3V1I8sppAOXi+j730e00DXXoAsiH9Plwh5qt2dnrb//PghEnw9NW049VZYeqawcHxH2eijIovyAEFhbAwOYu6pqTJZxOizvzfHWkdkM/YQbrk2YMPI7PIcHBnA8bjg3a1Z6dae3b5fNOywWyJ5oNDk9ctEi3N+HH0LHOOUU6FBuN2RbvGjCoiKk6m3dioZPpaVExx+fOoEYjaIBXjgMvchslo2nsh1pzLq8y4XxH4v05smTMUc+/hhOps5ORCVm2q07FuNZFoVCWCMDA1J/tNmg83IzCG5+ycRhtoMmtOnLtbWp18Jj8jAalU1cslQfPC1os9q0qd6qilc2SotUV0snyMSJmZHwhw7B5ps0Cc+3r082QeHgHW66WV6Ov5yZIARqIscGaSSD3l40fWKnS6bPTFGInnwSMnDJElzXaMuUVGG3w6ns8cDO6+jAWpgwIfNmX6OF8SzfMsExQyIS0bVENIWIxsUD5LpQAwMy7ZMIiuLMmRBCfr9k37UFV61WKHMlJbLYKteN4A6TsQRhLLhjbXGxjB7LRnOSowVWK4ST1yujDRmlpRBohw6BDKyrG55SEI1CyFksI4k5IigbDQ2IQAyFcOxAAAqt1So9WjabPO+bb+L9s89OPoyda2hOnTqSDFAUEIgDA0hfjlUI/X7UF4pEEIGYyOtTVIRr5c50VVWye1quFRWOpvD58BxSiXysrMQrGJSpzg4H1gR3sM4BATCuZFEBuUcsacidm1lhzpWcjUSIHnsMa+SKK4yN3PZ2fNdmw77U1JR8dF5vLzzadjvOoWfc8B5WUqIvD9raZDmFZOu8xsOhQyDzjhwBkXj66fKzwUHI0mw0OIlGkSa9fj3G7Gc/AynBEY5GNR/HEQqyKE8gBIzigYHhNfO0CAZl6ley3b+tVhzX4cCx9er6lZTIzq1VVdBZu7r09RsjqCqMYb8fJJRWj0mlxMNxx0GWbN8udZhTT03uWiwW7OvbtsmIsVSgqkTvvgu5cuKJ0AMDAYzF4cM4djYaHWhRVITn43bjxZFaoylXmICdOhX1Xt98E3rjkiXZSascwriTRZGILGPEZaasVthYXK+tvFxmEeXimagqzsXRj3V1qZFhLBMiEfy+omJ0mhvqQVFGZrVxEExREe6rrExmCWRKEAmB8WIniVE3eiNEoxi/zk7IGy7Bwo1buI4ypyZrn0swiDIq/f3IzEiHQOzpIdq9G8+MnS6ZIBhEXcZt24jOPBOp1ePVwakHLgkwOAh9sb0da3LChNx2Ec8Sxp18ywaOJRJxXEBVIRyZgAoGsYirqmRttv5+CDwWshYLNiuLRXafGhiQhE0stM1JeMMYzeYkRwvKyrDZce0RrYJbXAwF8vBhPIdQSKbPcge/piZjpfjtt0FQrlghlRFFwaZps8n09EAAIfSdnfC6J0uQcVTexIkjyQBFIdq7F/c1ffrIwsPBIAjEQABpecmEubOHyGZD1CPXTBwclFGsuZhz4bBMD+e6NOnAaoVSPGWKTHU+cACbVH09xjGLynIBxyCYNGQiUY9YzDaefRbr48orjWVHfz/WqcUC5b2hIfli5E4nmrWYzSAQjbo9BwJS0daCi/hzOYXGxpRuTxdbtqAWodlM9L//i/Q7PtfgoKy7lKmB0tuL5ilbt6LO5Ne/jvsMBLJz/AIK0ENVFeaxx4M5zWsuEIBOwV1OU5En5eVYp4ODsna23neiUeglZWVYs2Vlye+57CiYM2c4Ucn1FrlZWzJG7cknQ4f+059AIC5dmtw1DAzgOk44Afeyez
f+JmPgqyoijjs6QKhNn473bTaQa52deE2YkJ3yCFpwdgynN0ciY5PeXFcH8mH3bjTG6e7G2MfW0c53RKOYqz09MpCgtBRzgJ3kpaW5Jy649E4ohLWWQj24f5Y0CIcxTzLp8J4JOJggHJY2LUfs6WWyFRVhXP1+XHem12yxQPfhAAEjPUhVZWQhB3KEwxjD3bux/hYtkvXf4wXZRCJwrHZ2El16KQKDUkVnJ2y/6mqcN9OMP6cTDVQOH4autnp1ZscbS1RWQv5x5ltrK/an+vqCnTbaGFMOWghxtRBCFUJ8WghxpxDisBDCK4T4mxBi2tB3viqE2CuECAghPhBCHK/5fZMQ4udCiB1CCM/Q6w0hxLkx52klohVE1DR0PlUIocZ850IhxOtCCNfQcTYLIW7WuWy7EOJnQogjQgifEGKDECKpcqQ+H2qLvPACmmMcPiy7MLvdIC0OHgQZ09+PzzgN2euVimM0ioVSWwuja/p0KGYLF0K5WbZMdlSbPl2mI1RWQgAWCMTkwClEbHzGwmyG8lRbi82ptVUqHZzuoIdDh1ATaPJkCL7jj0etGfascIRBRQWUtM2bYdgnG6EzMIBjVVfLBi6MaBTKn8uFjS2WLAiHZXrQGWeM/L0enE7ct9WK+chRidwxLhKR0RN6pHe68PsxXmwEZKK4P/zwwySEoJdeepHuu+82OvfcKXT66aV0663n0JYt7bRlC9Htt99Pzc2zyWazUb7LogJGHxx9SCTTX9hg1islkSk++ACv1atR90sP2i7mgQDWbLIec7cbBCJHOeo5GyIRSWzERuhw5/r+fsiNbBCIGzYQffvbcMg98IA+gVhenjnB9847RFdfDVn6/e+j1iKna1dWZnZ8lkUvvvgi3XbbbTRlyhQqLS2lc845h9rb24mI6P7776fZswuy6FgF63JeL/Zy1hGLi9OPUqupwe+1Dmy983JmBqfx6mW6xMLhQNSgUaSx2Yxrjkb1m/LFwu2G3jFjBn63e3fi3/h8+F5JCfTjJUug3+zdCz0iEbZsAQG5aBFSubWwWCA37Xbo7319iY+XDux2qZM6ndCBcgk9WVReXkrf+MY5NGdOO5WXE9155/00ZcrRIYu4meV778kGGpWV0JMnTIC+P3169mrsxYPfD/0/HJbdaZOtg+504sUNOWtrR5dcYWeDyyVLhxDJ+sOcSWdEjHHGAhPmmYIbp/l8sKGJZOOpvj7IsYMH8ez7+iBPmTh2OhEQcsYZMlChuNj4WUSjaPJ08CDRhReirnSqOHwYBGJtLSIQMyUQ29qI7roLduEtt+Q3gcgQAnvWrFlYn14v9MmuruT2JBzj2OKgcoExbawihLiaiH5LRJuJKEBEjxPRZCL6FhF9TESPEdEXh75jJ6J/JSInETWrqhoWQlxCRP9JRH8molYiqiKiK4loERGdparqxqHzXEhE/0NENUT0DT6/qqqPDn3+bSL6ERFtI6I/EpGDiBYS0TxVVc8c+s4dhKKWHw59/gwRNRDRN4noQ1VVVyW6309+ktRJk6BI1ddDGSguhnDnKA2OEuTQbv7Lr3GeGnVUIhDAxmO3G6eq9PeDHPR4QN4a1YsJBEAiRyIgDmtqZAMB3tC4mQ7X3rHZZCdmbei/Hvx+eLBKShDZo50vkQg2k0AAinws6RaNwjju7yc66SRsmPHAiqzXG7/IMxMUXDuE53omYfQeD66zqAhrKdOU6WeeeZhuv/0amjdvORUX2+iccy6j3t5OevTRn9DMmYto1arL6W9/W08nn3wNORw+euWV73soj2VRT0+hmcFYQZvCrI1OzGajla4uot/9Duv80kv1j8slDTiiqawMClky1+Dzoa6O2010ySX6soLr+JpMI7uLKgqcLi4XyMNkIx+NoKpI2376aZAD3/mOJPKYQAyHZdpZuohEkKL09NOQ2//yL7K2qhAwVDKVRY8//jDdeus1tHjxcrJabXThhZdRd3cnPfjgT2jevEV00UWX05NPrqdzzrmGXnvNR1u25Lcsev31gixKFxztr6rQJzNNpY1EQPiZzdBN9PZzRcE5ufZiaSl0DSOEQkS7dmFdzJ0b3yDWRirFM9Jfew2y5VOfAjF45AicsUYpzZEI6vmpKvQuTufkSGinE/dgdB8HDkB3mjYNvzcCyxqPB3Kmujo36YKKIp0W7KDJhW2wYcPDdPfd19Ds2dCLTj/9Murv76QnnvgJzZixiM4883J6/vn1tGDBNTQw4KN3381vWbRxI6l+P+b0lCkgC0tLRzf9V5u+XFwMIimZwA/ebwMBWZ8xVynWeohEZKoyO0W1NfVTXQfa4I14zaGSQTSKcenslHXTeUy5pr22W7LFgvvZsgX3s2xZcrJVVREdvW0bshNOOin1a21rAwFZXw/nb6byY8sWoocegu5zyy3ZcdaOFygKni2Xc+vpkVlk69YlbqxyrHFQucB4iUmLEtGpqqpGiIiEEGYi+jYRVRPRQlVVvUPvO4noPiI6lzCAL6iq+pT2QEKInxEmxL8Q0UYiIlVV/zL0kKz80DTfn05E/01ErxPR2aqqBjWf6U3CQ6qqXqT5Th8R/a8QYoGqqjvj3aTbjUlfXIwosClToLQk0zm5gLGDzYbN0eeTjWViwaklqgpSzagW4DvvYKNvboZQnzZNbvLl5djAenrgidq7F8c46STMkXAYG6HPh/litQ4/RzgMzyV3jdPO3nAYm1MwiHPGph0qCqKW+vtRcygRgajtEseh5UYQQtbvZDIxFJKbdqpKzsAAFAGbDUpeNteOyWSmX//6dbIMaRiKEqX1639MbreT7rtvB7W0lJLPR/TKK9//N8pjWVTA2CE2GpHJw2w1WvH5oMiWlhJdcIHx8Xp6ZLpTWRmiepI5dzBI9Oc/Yw1edJG+rGA5SDSyyLyiQMnzeKShlgmCQdQifPttdGC+4QZpIHBX20hEFjtPF729RD/6EQiRT3+a6PrrJaFiNkMOZlsW/eUvUhZFo1H6xS9+TAMDTrr55h307LOlQ00hCrLoWAWXQeDmJJnCYpFN5QYH9esjmkxYSy4X9nCv17iWIjsqVBU6b6L1YTbLezEiTrZuxfV98pO41uOPR53CzZvxm9jmMIoC3ScSwTVoCSEh4AwwmeB40Uttbm/H7ydNik8g8vGqqnAfg4M4Xk1N9rN/2DHD5RPc7txmGZlMZvrZz4brRY8//mPyeJx0//07McNzmQAAIABJREFU6ODBUmptJXr33fyWRVYrIscybcKRLiIR2aE42fRlRZFNN7krdC4au+iB05S1jTk5EyletF4yEELKGY8n+UwjTkvWdkvWRjNyR/eJE2U/AL1j7NqFMV28OHkC8fnnZb3BdAjEgwchayZOxDzMZPxUFVllTz0FGffVr0Je5guYHNT+jX0vNgZucBBj6HSmfjo6BjioXGC8kIgP8cMbwluEB/h7fnia94mImomIVFX18QdCCBsRlRKRIKJNRHRpkuf+HGEcfqB9eEPH1/OQ/yLm/69prinuA9y4kei551CYeOdOCEaPB9Ei06fjbyGff3xCVWWKSl3dSKO4rQ0bf
nU1QuKLixECr918tm6F4Dv+eHjklyzR38AUheiZZ6DErF6NFPXYgr1+P76nTRM8cACbbqyiHAoh7W7GDERJxm7Gqkr0xhuYe5deimuLh3AYCnd1NRT2VJsTcL3EYBDjyDVGEm2YqgqS1G6Xil62FKUtW/D361+/gU44QQ72RRetoPXrf0xr1lxBNlspnX46agD953/mtyzKdmfFAlJHbESiouD/mdRH1HYDvPFGY6+zy4XvWq1Yw/PnJxehx41aAgF0PI5N6+P78niwpsvKhsuuaBTOETbWUmmCpAeHA3UJ9+xBTcLPf16OnaKA3KiuhvKcCYH41ltEP/whrv+uu2AkcPH3ysrUalUlAiv6N910AzU2ysE7++wV9Itf/Jiqq6+g558vpZUrib75TaIJE/JbFp16apJXVMAweL1Yhzyv3W6sq2zMxcFBrJ2aGuPu5aEQCI/eXsismTNHGtu7dmHNLFmSfLSxqkLOCDGSyNmxAwTd2rWoi8hYuRIGvNOJOtPaiMI9e6AnxZM3qopIRW6OMns2zn/gAJwt55xD9IlPpDauXi9+azLhenKl24fDUp5nGmkdiwMH8Peb37yBTj9dPgyHYwU9/viP6XOfu4LKy0vppJPgVLnvvvyWRStXJnlFOYDfj/2MKLl0aXbU+Xz4t90O8jCXjTJ4bXLDTtZfOLgi2804zWZZtsHn0yfzuOYyE4baJqQWi4wu5L+RCNZlKKTv+CCCLHA6R9ZvjYeNG1EzdeXK9OYRy59Jk3DeTMYxGoWe9vrrsDevvXbsmunEgudQPHJQzyEmBOYD93woKZH9IsJh7DW9vbC7L7gg5cs6JjioXGC8kIhtMf8fGPobW6mE368hIhJCFBPR9wjhprE54cmmyMwe+rs9ye/HXitz3jWJfjhhAhbzueciHWP3bhhUTidIKLMZCsz06YgWy3aXtwLSB3uYuQmBdmPp7sYGNnUqNnGrFc+zrQ3pNZWVICA3bcImO2sWjHYjod7SAiN81SoQfgcP4jicnme14rdMJg4MgNSLTdUhwnf27IFgnjtXn/B75x0oi0xuxgPXaREC15SOYc4d7UpKZKFwv1+SiXqIRpGK4PdDwapJuNrSQ1PTcDFSVIQHXVIyjebOhaE0tLnntSwqYOyhVx+RoxHTjWjbuBGRPxddZEwgclRyXx8Itubm5AzPaBRe7fZ2HF+PQCSSaXaxzZQiEcg2vx/nTKZhUzwcPEj0b/8GeXznncMVd0WRNaEyKSgfDhP98pdEjz8Oxf7OOzGuAwOQrXa7McmSKbSyyO8neuUVyCKTaRp973sgMguy6NiElkBkvUAIkElOZ/L104xQWYn57XDIkjuxKC7G96JR6B+HDoF8YxKDG41Mn55auQI2Ftm4ZFnY0wO9eepUEHqx13LuuWgk9be/IZWwvl7KuenT4zsshJDXznWW7XaiDz+EUX/SSamPZ2kpZEVXFxzLEyfmptlSURH0IbcbemMolHxn7mQRqxeVlUEWWSzTaMIE1J4dkvUFWZQiOFre40kufZmbc/p82OfYWZerbDZVHd5RWUsccvmtXEY9Wq0yHZmvh0nDYFBGQHKpJ25OabXqjwl3OzfqRt/RgTU7dWrijCzGm28iEOOEE4bty0lBVcEDdHYiCtpIr0oWPh/Rgw+CVFuzBnUZRyulnWV2vChCvfrfJpNsHsuZmdqGskwexkJVIa937sRxFyyAjZYGkX5McFC5wHghEY0SMYze5yVxLxF9mcDMvknIE48S0TVEdHk2LzCFa0qISZOILrsMtVjeeAPseTAIhcPrJfrHP/CaMAGs+vTpuTNUCkgeRUXYrDl1oKQECrvLBYWVFcSyMjw3VqL9fqKXXoKxu3IllFUjZfLIESiuU6fC0x4KYVNrbwd5xlGQQsiNcv9+XMPkycO7vwYCIBAVBeSg3jk//BDfWbIEYfvx4HZjrhYVYXPNtPaXxQJll1PFeVzt9uFGC49BJIK1k8u1YNbsVG1tiHwgIpo1y2yUypTXsqiAsYUekRiNppfWvGsXHBUnnICXEdrboSSXlkJOJZPioqpEf/0rvOVr1siGJbHw+7FeS0qGOzPCYRCIgQCU5EzTat57j+gHP8B5fv5zyFRGNAq5rCiy7nA66Ooiuv12KKgXX4x0IIsFxw6HIYdy6ehjWfTee0Q//alsHnH11WY66yzdnxRk0TEAjwf6YknJ8PnHdfEGB7NDJNbVYQ309mLf1TPi7Hast2AQZB0bwi4X9IqamvQ6k7Ls44iUUAhNk+x2RAXq3ZfNBtn07LNEL74IorG3F+RdsnXAmptxnx98AFJ02TKiU05JP7rLasV4dHXhVVeXfGRTKhACupTfj/nhcOD/mepoDK1e5HQSffwx/t3YaDbqjF2QRUlAm75cXo59Md6a5efLWQSxkf7ZgqIMT1UmwhooLpbEYa6hKMO7JWu7VHOtRS4BFa9OvB5KS2VN1+JiKUf7+2FP1dUhcysZvP8+0oaXLIHzIlUCcfduOEiampI/pxH6+lDa5cgRNH5bsSKz4zG4K3miFGO9uDkmArnTth45mI589XiQ3edw4HktWZKRk+aY4qCyifFCIqaLtUS0XlXVr2rfFEJcp/NdI1Z479DfxUT0ShavLSGmT4fg2LMH6VL790PZOeUUCM62Ngio99+XDTiamjKP4CggfZSVYfNxuSA4jxzBe1ovN9claWyEUH/xRQi7lSsRgWgURef3g1QuK0O9HyEgdKdPxybT1weybfJkuWH29WGuNDdjQ+VwfkUBWSAE0nj0Io22bUMHunnzUAcxHhwOKI8lJSAQs5kyUVQE5SkUAonudmMsSkuhwHR24j6mTs0sJTFZRCIYm+5umfqdxOaU17KogLFDvIjEZNdZXx8anTQ2Ep1/vvH3jhzBvmIyIdo9tn6YEV54AWTamWcaywpOK+Lurdr3W1rwV6+cQqr461+J/t//Aznx3/89PMpJSyBWV6dvRL/2Go5NhDTm1atl0wkmJ3NdesTtRur0yy9D5l53HQqjJ1E+oiCLjlK43VhHdrv+ns6NFAYGMFczaexhMmFtdXdDvhiVwCgvl13Yu7uxLlpaIAcWLUqfyDSZpPG6YQN0n0suiR81XVoKIvGJJ9A5/rzzEtcx1DuGw4HzJiJ1koHFArnMOlw4PLIkTrbAXW1dLsyB0tLsOTq4Cc2+fZJQTiIjpCCLDODzYZ4JkTh9mZs7RqNSX852eqqiyDRlriGobeSYS+JQVXFubS1DbVpyURHGiJ0ndXWZ2yBVVTiHw4HjB4NwxJaXJ99Reds2lFGYMwcRf6lck6LIFNwZM2DbZ4L9+4nuvx9z5BvfSJxVpr2OROSgUXoxE4FG5GC8BlnpguvstrTg+MuWwTYcIxzz8i3fScQoxbCvQoi5RHShznc9RFQlhBAxeeZ/IqJ7iOh2IcRbsUUtDXLSswYmeWbPhnfvnXegMDU3w4tgtYIMam1FLcWPPoIR1tSEV3396IUqFwBUVkIh3L0bSpReR0DefF0uGN/l5TDYjYx2RUH9inCY6Iwzhhu/XFPHboeSzunN0ShI
gaoqqeDbbNiUdu2CgF28WJ94270bc6m5eWRqkBaqinNwYeNcKb9E0ssZDMIgOXQIhEBVFQiPbHnV48HrRWqC329Mvhog72VRAWMHLZGobbTC/4+HUAgGs9lMdPnlxsq+zwfFy+vF3I5tIGCEl19G04KVK4fXIdOCSQSLZTjhHgzinJEIFO1U66dqoShEDzyAzsinnEL0/e8PX5+RiCyonS6BGA4T3XcfzjF/PqIdJ0/GfQwO4rnkolGCFrzKf/xj3N+FF+K5bk822aUgi446cJ3RUEim6xnBZsP8ZyKxpiZ9g5vTK/v6ZHSjHqqqZAfkjRtxzpNPznzPNpuRldPejqZJyTg9iorgeO3qQprgokXJE2kOB/b/pibo5G1tWHeLFmWWLso6XH+/jGTOtjOWYbHgObndsslfeXlm5wqHoS/296d83QVZFANt+rLVivVlNLdCIdn8zGLJvvMqGpWpykwUmc2SjM5VinQ4PDzKMBiU+x6TUmVlMsqQ51s4LGsRZ6JLEEnytqcHmU6dnbjnhQuTm9979qDB3PTpRF/4QmpjpSjIcurvR2ZGsrqYEd5/n+g3v8G6v+UWmYadqO6gXnMSIty/liCMJQctltzW3jSC04mAHLcbetmiRWPeR+KYl2/5TiL+hYiuEUJ4Cd1wZhLRV4hoFxEtj/nu+0T0aSK6VwjxLhEpqqo+rqpqqxDiP4jobiJ6TwjxBBH1E9GCoZd+4lCWYTajYcOCBdis33+faP16/P+Tn4Rg8/tlrb0dO6Dc2O0y5XnixLFZ2McaTCYZEVBZaTzmgQCiEIuK0M2zpgZk8JQpIwXfRx+B/Fu50jjdpbISm/vhw5IMaGwcTmL6fNgQKyvhneHuodwdmQhenHfewecrVhiTFFzrKBiEopOLNBw9WK24Zlayyssx97luRi6xYweMn5NPxobcHlsRwxhHjSwqYGwghH6zlXiNVlQVimxvLxqdGK1RRYHS290ND3Vzc3LOgDffhKw48URE4xkd2+PB2tQq9oEA5JSigEDMpB6Yz4eIwLffRjTSV74yXO7GEojpkHyHDxPddhuu+dJLib78Zchunw/yvqgI45vLPbarC3UniXCuO+6AoswRZkmiIIuOIqgq5l84nJhAZHCDlWwQiaWl0AG4G7MeIScEdIS9ezGHGxuzU3bkwAE0PVu4EDIkkVMlHIbTtrIS8nDjRkRRf+YzicfN5ZIN5k49VaaL796NiKPFizN3HtTWQo709kLeTJ6cG4dEbHqz05lZevPOnfjtggXQXzdtSvqnBVmkQSQio1ErKoxrV4bD0mlgNuN72WqYE4nINGUmDrkWXXFx9vc3RRne+CQYlOflbKvKSvy1WuPPUW4k6fMNbyqVLsxmyMlXX8X/V69OLsLz4EGiP/4RjoG1a1NbV9EoAoa4cYteEEoicHOSSASRkM8+i2jGdevw2eHDwztSM/Sak+jVHxxvgUmRCOTwwYN45iedNG6aQx7z8i3fScRbichP6G5zDRHtJqIvEdF8GvkAf0xEs4joKiK6mcAeP05EpKrq/wghDhDRNwlFMiNEtI+Ifp37WxiOoiJEhi1dilpIW7bA+Fu6FAtn3jy8gkEIitZWGD27dkEAT5sGUrGxMfeEy7GKI0cgqJuaZKey2I2Ha4j19qKGz+LFeB6dnSCBGxulUd3aCgE5bx7I4HgoLsZvOzqgZPBmwikse/fiO3Pn4m8kIpuXBAK4nrfewua3erWxwhAKwRiIRuHVykVBcD0oCogOjwfnravDdfv92HRtNigR2VR0wmE8AyIoFKtWpaVsH3WyqIDRhzYCUVsf0UiWv/02DNyzz46ftnfwIGTDtGmQDcmsn/ffh7G4ZAmOrweOkCICgcjKp9+PfYsI58vEAOrtJfr3f8c93HrryM574TDIEqL0CcRXXiG6+26M8913yyYtHPXAhk6ulOtoFNGP69cjApsINY2OOy6tfbwgi44SqKosncKROcnCasV6cDoR8VJTk75OWF0NnaCvD7qD3v7Y3w9ZMHcuZFZPT/KNCfQwMIBaYxMmEJ12mkxtNlrfigI9KhQC8V5eDrn14ot4rVljTBB4vShhYDIRfepTUl5x1N3OnYiAWbIk8+jKigrcQ3c31vqkSbkr05JJejNnoRBh3nziE2kRwwVZNARt+nJ9vf4z54j+QADzjhsQZrrvaDsqc2OLoiJJ2mVLn+a0ZC1pyDUViWT9Qa7nXlyc+r3ZbNKuMZszW4+qCkeFxQIyT6/pRywOH0bn45oaoiuvTE0mRyII/nG5kOmgR4Ql25wkEiH6058QgHLccXCuMgnMUaTJNCcZ7+jpwZj5/SBK580bnZqcSeKYl29inEdKHm1IebDdbkSCfPwxFs4JJ6CLrlYZikRAKrW2QikJhSBEpkwB0TV16uikgh4LcLlABNbUQBHo74dAj63R8Y9/wCg86SQYvYoia0gcPowNlhXsF1/E8c48M/FmHo3CmI5EcM7+fhy3pATC1maDEh/7vEMh/G7jRlz3eecZK5M+H45lMuEaRytcPBLB2AYCMBy0UVWKgk3E75dNZbiQfCYYGECqZiCAzSmFwsbjzFeXMgqCfxyDlVmORuQUZy1aW4l+/Wus9yuuMF4L/f0wkMvKQI4lY0Ru20b0zDM49sUXG8sltxvrtrxcKnZer6xXM2dOZgZySwvRd7+LdX/77ZCnWoTDIElMJhAdqSrJwSAKkf/1ryAe7rgDMo9Tzjh9NNPUqXjYt4/of/8X97p0KdEXv4h9O4UOqwVZdBRCSyCWl6dfA43XiBCZEYmcmcB6gVYm+HxwOtjtWEc7d2LNLl+engMyHEakj8+HJoTl5dJhynpULFpa4HCYOxe6EaO9HY3tGhqQERL7W78fkUjhMMhKvZqtfX3IUrDbsUazUY+OHbWRCMiEXMoYjmYNBmVDini6ZjAIw93hAMm5YEHS86Ygi2IPqGL9eb3G6csczc/6rd2OdZOJfsvRhqGQjODlhiRFRdlxiIXDwwlDPhcR7tFmk4Sh1ZpdstLlwrjFywZLhP37YTvPmoXr8/mMCV4i2EW//S0+v+661Ej1UAi2hsuFUgk1NfpEoVFzEi0ZGAggffnAAZQ7+cxn8pMgjIdgEDK3owPjvHRpyv0g8l0W5QUKJOLoIu3BdjhATLW0gDDiaEW9zairS6Y9cxpoYyMMk2nTRqc5xdEIbnbDEZ9CyO5qxcVSwHV0wDBtaCC64QYoh9EoBL/Fgg28owOKxebN+HzNmsTROqqK8/t8iFi027Ex7dyJKKOJE1ErTI8w7u0l+tvf8Ow50q6oCOfUKtWDg1CYi4uhPI6WxycYxJgoCs5rZHgoikxlYPI0HU+tqoJU3bMHY7J8ecrp2vm+QRUE/zhHPCLR7UYR7eJioptuMpbpoRBqGgYCMJCTUcL27EFK7fTpMOCNlFOvV5JsbFR7PJBFFgsIxEwcEG++iRTmykpEB8YS/KEQiL50CcT2dtRV3L8fJOwNN+C6o1EcNxLJbhpZLIJBRB4+/TT2gCuuAElaW5vyHl2QRUcZFEUayWVlmZNWTCQSZVb
TMxCAIW23y4ZG0SgIxFAI89dmgxzYsgXrP51o2hdfBLl+wQXDi+ZzE4DY0iaHDmE9NzXp1xfbtw9E4bRpqK3IcjQYRLS1z4cIxHiNQhwOEGs2G4r5Z8O5ysRsIIB1n+umidygg9OdjSJKt2+H/Js/P/nO1kMoyCINwmGMJ6cvV1YO/5z1WZ8POimTh+mQYqo6vKMyE4es62dKHHIXdi1pyDqKySSJQiYNc207RKOQkZzunSo6OyEXGhuRxaGqkG2cfRUrsxwOOG1NJqJrrx2+VmObk8SSg34/AoECAehF/FtOL9aLGNT+W/vcurthXzqduI4TT0z93sc7Dh0CgRiNgnCdNSutNZHvsigvUCARRxcZD3Z3N4yr9naw8ytWYKPX2xw4HaGtDVErrDw0NMjGLKOVpprvUBSMoaLAuNZukD4fNrOKCgj8n/wEY33zzSDEGOwVLC7G9/7yFxiwZ52FNJlEQrKzExtHY6MkvPr7cYxgEBtTWRk+1yqHTiea9VitkqzkFGFVlbUxnE6QiHb76NbX9HplhENjY3LKeTQqSQyTSaZIJINQCJFWR45gLSxenFakbr5vUAXBnwfQptdwirOiQJnt7ERtQKPaMKqKvaKjA/tEMh3sDhxAZ9NJk9DMw4i8YPlRUiLXncsFpby4GIpyusSHqiIK6Ze/RFTRf/3XSOOeG52Yzel1oH3pJaJ77oGs+d734HwhkqnRqgoZm+0OmIyPPkKH6a4ulJW4+GLcY5qRYgVZdBRBSyCWl2cvi4S7ixNl1rnc5ZJNVioqYBwfOQJiTbtOe3rg4GxogI6aLLZuRZO5U05B5k0s2GA3m7Hue3vhXJ8wAQanEXbtgjxsboZDJRJBhLbLBceqtsu7EQYGoDsUF+N+s+GQZz3d7cZ45rpZYiSCe45Gh6c3qyp0yQMHoEcuWZJWdGRBFg3B65VR8jU1w+eKqsoyQ6qKz8rKUpf9TBxyqjIR5g5HG6a7xlV1OGEYDI5MS2ay0GbL3T6ZCNx4xmpNzZZ1OCC3amsRZcvrLRyG3CoqgjwRAuvE4UDkXzCIGojV1cMJQ700aG5OEonAMRsOI0q7vj795iR79qC5nNlM9NWvpt55frzD64V87evDmlm6NKMI7XyXRXmBAok4usjaYLe3owh0Tw8E4cqViQVKf7+MUGSvdF0dSLGmppFesgIkDh+GgJs2TT8yxenEhvbCC1CCv/hFeOBjEQhg09m3D97e2bMx7jYbPOhG3ru+Pjzr+nrZobC3F8RmRQWO4/WCWCBCfY/yciiLGzZgszr33OECWVVxPZy+HI2CkEimA2K2MDAABdpmS6/IONeQCYexsdrt8UlIjv4MhWDYNDWlfen5vkEVBH+eIJZI3LABtRAvvRSGnhE+/hiyaPHi+N9jHD6MLs/V1ZBf8aIbvV4YDqy4Dw5CptlsIBDTNV4iEZBrzz0Hcu3f/m3keg4EcL50Gp0EAkT33ovjL1mC9GWWd4EA5KXJhOPmIpLC5QI5+vLLIGrXrYPsLi/PaP8tyKKjBBxdo6qydl62j+9wQKbU1KS/Tnt7oTeEQtBFm5v16znv24fP581LroFAZ6fseLpmTfymb4oCObRzJ8Zq4cLE5NvWrag1zmRjXx8cLFpnbyK4XDiO2YwMhmxFKjsceJWU6EdCZRNcyzYQkLXxuNlDYyOeV5rnP+ZlUaL0ZW52oyiyC3Eq61xRJHHIzTO4Dl5xcXoyIxQaSRoyLJaRhOF4auDJmUmlpckFIHi9iJK22UDqca1VJgU9HmmTlJfj+3/8I/5ecolcm0aRg9rmJH4/ZEUkAj0sExv7rbeQuTBxIjowa0s25DvYgdHSgnFbsEBm+2WAfJdFeYECiTi6yOpgqypSx956C5vWpEnwqCbTLn5wUEYo9vXhvepqGaFYW5vNK81v9PdDaZ440TjdRFFkl6xzzkFkiR64kO+mTTC2V63CptXZiY15ypSRxrvLhfDuykr5bHt6oJxXVg4P9Q6HEXnk92ND/eADXNu55+pvYFxP0+2GMsNd0rJRzDkeVBVjOjCA88bWWEoVoRCUiUgEm7jdPtw7yuPO5QCWL8+YNM/3Daog+PMITCRu2UL05JNwGp13nvH3OzshYxob0Wk00Vru6YGCWloKYsvIqx+JQFZYLLIekNOJtWW3wzhPl/jweEDqffghUnuvu27kdWsJxOrq1GRUayvSl1tbQZJee6007rxenD9XHZhVFc/jF7/AeS64AE0frNaRUSppoCCLjgLkmkDUnoeJxOrq9KKIVBVNTLZuhR6zfLlxNgzXATv++Pg1xLxeoscfx/Vcemni6/J4cH6bDREryY7Xu++ihEBVFdFVVyUXoa137i1bcM/LlmUvo8ftBoHBjR5yXcs8EIAdsGsXxnHJkvS6xWpwTMuicBj2VCQC/VKbZsup5NEonmsqUcaKIqMNmTjkpiKc2ZQsIpGRhGFsWrKWNMyHWnvcvZ7lJjceiU0x9vkgM6JREOWxpCOXSWCCvawMNl1/P2RFc3Py48HnUhSsq3S71asqHCsbNoBc+9KXkm+OlA8YHMQ4DQ7KzLAslVzLd1mUFyiQiKOLnAy2oqB+wNtvQ/jNmAEjM5n0DCIobxyh2N0NoVVWJiMUOaz7WITXCwKvoiK+ctXWhuL41dVEX/+6cX09jwdRMBYLinxzZGAwiPMoCs7D7/v9qN1XUoLnIQQIgo4OnKu5eeSzUVUQjM88gw30ssv0Pe3BINLpVBUEqc2G8wWDsnmJzZb9Z891O71e3EOy8zQZBIPYvFlRs9txf1u2yM6SixdnxUDL9xVREPx5BFWFbP7FL0AMXnutsQHidqP+aXExnAeJDBWHg+iRR6Acr1tnTK5zmiXX0xICyvXBg5BXs2enb3B0daEDc0cH0be+BdkYC78f5y8uhnxNRS698ALRT38KeXb77bKOEDcc8PvxWQrNTJLGkSOoYfT++yBcrr0WMo8JxCwYaQVZlOdgApFIlkXJJRQF6z4axVpKtb5fMIga3YODSDlubDReN8Eg5r7FAiJRTx5FozCUe3uJvvCFxE7sSARpb36/TLtNZt2qKkjEv/8dY7xmDUjAdODzQa9QFJCY6ZIEsQgEIA+JYFTnqiarqiJSdN8+jN28eZBLGRKix6ws8nhkjd7aWrmmQiHZgMxiSb7LejQqicNoFO+ZzTJVORkZoSiSKGTSkElITnvWEobjvQEnRw3GkoNcHzkSiT9/Oa148WLIvdgoQq3zsLMTmRluNwjEeKUSYuHxQD4RQTaku6ZCIaRRf/gharauXZsfpG4yiEbxPA4cwDxcvDi1iPAkkO+yKC9QIBFHFzkd7EgEXt/33sNmMW8e0Sc/mVrDiEAABFRbm2x0UVIiIxQnTRpfoey5RDiMqBWLBfdudN8eDwhEv5/oy1+WRm6sNyUahXHvdiMKhZUB9rhHIkgp5O7E5eUQsCYTiGHu7NzVBSVlxgx9xTkUQmHynh4lQopZAAAgAElEQVSk+NTW4rlpvaJeLz43m/GZ1usfjeJeuN5gSUn2OjSHw9icQyHcY65S6DlNu68PER
Ht//13/FOk4mzzyDiEWTCQ1XLr0U7/t8WMclJbKRSbRoGlIYf/xjfP7e98LxwinC6YgxfbmoCB7tp54CDnR1Ed1wQ2Y1yBQFx1UUudZSXWe9vUh76u3FNfX0IJK0vV2OB9dPJIocI0XBc+RO1g0NqdWSXArhTuAHDsg6dCYTiN9bb4URUFeHcaqsxGuRsKmARUsouo79XtOQjp4LR9X+/dAf3va2SCNWCGCc1wt9Kx4e9PWB0Fy7NrVO8UslQiDSm8tF7NiRm+7LTKRxKYP6+tSyDIqKgB+6Lh0htbXAs9WroTPNzsZ3IBcVYf2+8Qac2NXVsoxPNrU/TSYcW1XxMs4lY8MoTk12uyVOVlbifjjCkEsiXHUV6m4fOAD8yaWzPQUpYNEykDNnQJBt2iQDF4RA5ODhw7C/opt3ud0g+y0W4Ei8yEKnk+hb34L+dPvtyKrKBQHEqa1zc7B3iothJ6oqyLVAADbf5s3Z4S0TgOEwxoijiNeuxTrh6ORgEC8m7iwWjAcTh9kK1zt85RXoaMXFwKDdu3EtRv2muBi4b4ym9PnwKiqShKLNJuuCNzfnXkeKrmPIQRtEGB8uF2W14rOpKVwPO7MrKmCDNjcn1oVzLAUsykJWajqzmYhmCWxvlxCi/+z7lUQ0T2CR68920CGTyVRERJ8monuIqJmIBono34jIS0Q/pLPhqSaTqYOIPkXIa28h5MGfIaJ/J6IfiIUHc8UNNodWKwrAu7gYQH/sGMD1/PMTR6EcOwbDdd8+pL4OD8NwHx2FMX/FFZGKjRDYvCYnQVZ1dEQaW0IgHfjMGUT0dXfn7j6DQZBTRADxhSJrosXplN2XuRbi/DwM9htvzMzrrmkwSOfnAcitrZHGw/Q0jI6hISjle/ZgTBQFSvvgIO6LQduo6LJXiAjKshHIvV7cC3ct5K6JvGnz5pFNx7NMRAgoET6fLFxeXQ0SoqICzyyHG2aqofIFLFoiEQIdBPv60EilvV2m2bvdMlqPCGvnoYcwT269FTjT34+53toKjBkbg8HV2Rl/3vzhDyAOW1qIvvKV5EZYOEz0zW+CBOzuJvrc52SRfiYQS0sTRyD39RF99av4d9cuone/Wxa2TzftTFWlEj4+DqfL5CTu+4YbME7pKm5C4B68XtntPZXrCgZBrL3wApRaTlm+5JLEY2FsxMI4xQpoXR2U5HPdgVDT4EQ7cAAGAHe73roVJAk35hECOFlVBeMjQ4W5gEXLUHp7gSHnn5+bmsCnTsHpun17bHmXoSHoJx0dsZ1TiaBbHTwIjNqzZ/lE5obD0IW4S3K2xj+L2w1ijQiYkEkdVKcTL7NZlmMoK8NYejzQQaMzYMbGpP5aWgqnVbrRUMmEic35eUkcOhyRTRqMdQwTpW2zhEKIOPN4ENmag7rhBSzKExkdhf7T3h4ZbPGXv8DZefHFqFFvxAqnE+vVaoWTL94ePzoKXcfvR/ryQo7VVCQYBEY4nVgDFRWY56WliJa027HGe3pyU+/P5wOmzs3hXtvbgeFcboajDbm2Ib9y1SRM14EznFXGXZ97etIrEcV1BsPLCJwpAAAgAElEQVRh2QGadSNuwBLdkDPd6zSShVyrkQjzxpgOXlyMc3s80DcnJzGWRNBjucZhLhxIZyXfsSgvZEWSiMtYVtxgT0wAjFpbAeJCQNl1OqEQJqqjxZ1Q16wh+qu/AmDOzmLjGR3FxmPcfIRAVN3EBIxcrldmlIMH4Y264ILcetoZ9IqKpBcqHRkcRPQkExM8XjfcACUzE3G5QABoGogPY0fqaBkdRYTPwICMomxuhsLY2Yl/4xkV4TCeU1kZFGjuwsbNDbq78YziKancaKWsbHGjgXQdG7vfj3+5i2tZmUxfrqzEPMsxsbBMzLCMZcVh0dNPo07gzTfDU0sET/j0NAhyVpZ0HXV6envx3e5uYFZREdbj5CTWVn09xW3cIQTR976HQuHbt6MxSjLFZ2yM6NOfBul+++1EH/ygVDa9Xiis3B00WkIhnOdXv8Ia/J//E6SEyQRsTVdp9fthgI6OopnV2Bjw421vg4KaCbkQCgGPNA1KaVXVwmt+agoOjldfBc6sWwdiZNs2XAN3Rk8kmibJQ05TXLXq3Hb6VRQ4uQ4exH15PLieCy7AfOzultELJpMcqxxELBSwaJnJ5CQMwPb2xKVc0pGZGdRPbW6GvmRcX5OTcICsXh3ZLIrFbsdab2zEGlsO0blE0C2OHoVxu3lzbiLhWGcJh4Gp9fXZ7ftTUzKSkQgYbbXCmWM2wwlMJBsthMPAoY4OrHWvF2s802ZOihKZkuxwAMN1Hfe1alVkHcNMzuP3g0gMBtHAMEsSpoBFeSCzs7CVGhoiyyy8/DLKJWzfTvT2t0fqAw4HAj+42VE8vf/oUehG5eVooJJNSQJuHDI3Bx3JZJJOwtJS6C7Hj8s6pfGyRdKVYBDreGoK66utDfYRN17hZh/GBinchCXTlOXo8586hWfj82E9b9kCp6OqymZrmegMTPgx6efzYXxttqTNSf5buLGLMcrQmJbMRCETh8Yaji6XJA6523tdnbRF0w3KSVHyHYvyQlZqOnNBlkCcToDDqlVSeRkagmHX2ZmYQFRVbFTFxSjYPziIDaq0FIb2mjWRBch1HZvXxAQ2ClbcjPLaawDe7u7cEogOBzbcsrL0iShFAen2+uvYGFQVivzdd0tjOV3hlGqPB9fU0ZE84oc7n65Zg+d1+jRAvKQE0Z/JvORWK57r/DyIwzNn5Ia9fXvyFMqSEpB6oRCuL5eRD5oWSRwy4cDRhsbz8eY1PAyF4FySDAVZPDl9GiTi9u2I1COCIjg9LTu4EWGuPPIICMS3vhXrcGAAc6qrC+Th5CQwjesiGiUcBmn49NP4/Sc/mVz5euopon/+Z8y7r3wlMjXI48EcZjIpWo4cIfr614F7111HdNdd0nhM0KU3oQiB9T8wgMjDkREc453vRPf0TAxtjorx+zEGNlty5VbXgdH79wNLLBZE5e3bF+td1zTpEIh3H7OzssNqY2POawemLIEAntPBg0j/CgaByzt3Yh5u2wbM9noxTvzsKirOfbRkQRZHvF7sl3V18XWVdCUUgr5UWgocMK4JXtN1ddAFomVqCsRAfT2wZzkQiELgmvv7sQ527sy+Yzobqm63JNdyYZg2NAB/VRXr2u0GVre24h6OHpUpjbW1iDQ2OoM0TdZnXaihhK7jeRpJQ06nJpJN/OrrZW3H4uLsn2l5OaIQH34Y6c033ZReTe6C5Jd4PLBHqqsjm10eOQKc2bwZzlWj7sOkY3k5CMRoPVoI6EQPPQQ9+2/+Jv1GbCyqKlOWFQXn4nlvsWAfffllOFbq6xM3dUlHFAU60fg47rupCWuf06eJoNtUVQGHee9mcjEQAAYUF0MXSXdvn50FIdrfj3O2tKA8lLGUi9WKz3w+HD/dcxhrNHI0p65jHDUNfzMpWlwcWccwXlqyMcKwuDhyvgiB58fEITtOV62SgSvnSmcrSG6lEIm4tLJiBjsQAOiWl0tv08QECME1a5LXInz2WWxYN98M8HI6Adi
HDwPojAXDFQUbnt0O8IkXuXf8OFLGNmyAopwLwkoIKOBuNzbbRNF6iWR+HqlHL7wA0G9rA+mQqcEuBBTLyUn83dQEoz3RNQUCeBZDQ9J7tX49lN/TpxGZ4PGAKLn00vjpVpySt38/Nu6tW2EYp6pgaho2D4sl405//y2qKiMLg0G8x92jmThMJH6/TEVvbc1Zd7h893KtGCxyOFBbtLZWRvk5nYgWqavDvGd5/HFgxWWXYd7PzGButLZinkxPYy3EqwfodKJxyhtvEN17b/IOzKEQCMA//AEOkc99LnKNud1Yo/EIRI8H3vwnngCWfvzjuAefb+GmK/FEVTEWTz6JtV9djXpY+/ZlHgXHaT26Luv4Jbomvx9pvS++iGdVUwMFeffuxOSB0dvPRrLLBWVbUWQaTmmpTG1eqhRNjweRhgcPykYQNTXA9l27gJNc+N3ni0xZLnjc48qKwSJVxbzQdcyFbKNM2ekxMoKyC0ann9+P+VdSAkM6Wq+Ym4OuVVmJxgZLXaM4noRCwE+HA9i2eXP2ZHowiOOxU6G2NrdkaTgMvYtTF7nr6smTIB06OzH+8SL4WG9TFHxufAZeb2SEIXeXJcJ5jBGG8eo55qJjs1GcThCJRUXQzTPs3lrAomUswSBsL4sFDleeUydOEP3Xf2Eu33lnpGN0ehpzvaoK8zxebfWf/xxp0Nu3E913X2YEUSAgU5aFwFq22XBezk4YGIAD2GQCdsRz9KYjXIJgbAxjU1cHPY3v0ZiunAxTNC0yUrGoCGOQLGhB12GfHTsGW5MzYbq7ZXPSeL/hus+ZNNKLFrZzvV6sd2PqM5ODHGVpTE2ONxa6Dv1schLHDIcxzxoasG81Ni55EEe+Y1FeSIFEXFpZEYOtaSCoTCaQhRaLjFarr4d3KxG4DQ6ic+q2bTDcXS4Y7EePAoDe+lapjAWD8MyMj+M7GzfGHvfMGRB169aBGMhV8d7xcWxqnC6SqggBMvShhxD91t4OUnTv3syVeK7H6PdjQ12zJvGx5ubkmAkB8F6/HkBuHBtVRYTkgQM4/qZNIGDr6mQjgMOHsbk0NsoOWTZbetfOoe+8+aT7WyYOQyG8x0V52WuWqoTDUBQUBWlf6TbFiSP5vkGtCCxSFKLvfhf485GPyIZFp06BLN60SSo8zz6LtP6LLya6+mqsb+4eb7HIQtPxas4MD6ML8swM0f/+30j/TYQ1w8NIXx4YQPTg+98fqXi7XLIWqZFEEwLX+J3vgKC77TZ0NAyFZIRbuvN2agqG4eHDWDdXXIEi55kS6ZqG6w+FsBZrahKv64kJjPfhw3hOnZ0gbrduTc3I5yhqvx/PhqOaGxoiDVw2vBeTSJybA/l84AD2OV3HdezahRfPM+6wHQrhWvgZLzKBU8CiZSBcs3luDt1McxHNxV28L788MjuDsxyEiF9SxOmEUV9SgjWfrRMvFzI3JxvNbN4cv3ZjOsIdlLlxQH394t2n1ytrRHPpnZoa2dRq48bEOKjr0N84osnlkjVpibD3GGsY2mypOxtyTSTOzhL98Y/YH266KaN9ooBFy1RUFRlb4TCytXiO9fXJ0lJ33x25V01OQpeqqQH+RBOIgQAcnseOwXa79db05iFHEM/NYe/kDuc2WySmuVzAO5cLARTbtmW31o1NJX0+rOOWFqzlkhL5StcZIYTs6syZFNGpzoEAdIiTJ3HP1dVIWd64MTU9QVGgY3CmWLrCackcZej3y/RtY31ExhVjinJ0yraqgmSenIRuzE2fmppkBtA5zLjIdyzKCymQiEsreT/YQqCeViAALxB3v+VaGVu3JgYNnw/1vbgDXyAAcmp0FNGEl1wia/p4PFC8xsdB+mzcGHvcoSEY3WvWwDDOBViFwzivqqZf5HVujugHP4DSX1VFdMstRNdck7nBLgQAemYGm9GaNfFTBNib1t+PTZa7iHV2LuxNDoUQVXPoEI7DhbWDQWzkO3dicw0GYZhw1FE6EgphPI1pAIkkHJbEISvZJSUy4jAbT5amSTJ21ar43RXTkHzfoFYEFv3mNyCp7r4bRE44LGt2bt4s58vLLyO1ePt21CLVdRCIqgrFyO3G+opn2L76KtEDD2ANfvrTWBOJlMs//YnoX/4Fc/Yzn5G1Gfl6mYCrrIxtgvTNb4I02LiR6BOfgGPE6YSil24UWyCACKZnn8V5L7sMqZDZkOc+n0yvS9QRmktPcA3WoiIQKvv2pU8aBAJQTrlJQUND4utfDCJxYgLP4+BBOKuIgIW7d+O1bp1s7uL1Ymw0TXZoraxcsvTRAhYtAxkcxJzftCm94veJZHgYkcznnQc9goXXmM8Hwz56P/Z40BjBYgGBuEjRrykLN7caHMS1btuWffoy13bVNGDCYnfx9Pmwz9jtcKq2tMh04qkpWVrGZJLZNRxhODeH/YXTmtesge7BUYbZXLsQ2MO4e3MuZHISaa01NWj6l6YDpIBFy1CEAGY4ncAMtiNGRoh+/GPMxfe9L5KYs9ux79XXg+SKF+n8zW9in7zrLjgIUxXunD43J3Uwmw0EovE8mgY9rb8f39m6NTvng67D3uRO9tXVsJVWrZLEYa5whFOdeX3OzwMHh4dxHW1tGNfW1vTPGQjITJaFyNToOobcYIVIkoTsHK6ujrSL+HfhsNSxiGS6ucOB961WWd/QZlseZTMo/7EoL6RAIi6t5P1gz8zITno1NQCXo0fx2bZtiRUOIdCZdHQUaV9WK0gurxc1ujZulLXMuA7h9DQ2sK6u2OPa7Wii0NAABTsXCpTfjw2RCEpiqp6uYBDK/u9+h/G4+mqkBGRrsNvt2IRqazHe0ffo98uU5XAY5+vsRPRjuoTqwABSEo4dg0J83XWxnmiXCxtXdFpOKsK1C+N1IA2FcL/cuZoIY8/EYS47PAsBJZk3zNWrF7cj6jKWvMeigwcR1XzllUjP1XV4zUMhGN48d48cQXRFdzfRO96B5z06CowxmTDnWlvj1wd95BF0RW5pIfrHf8T6ijcfg0F0an70UXj5H3gg1qvrdGKdGglBXUek4A9/iP+/970oaM7f13XgbKrpQaoKPH3kEWDrzp1wZmRDmKuqJDNLSnA90fji9cJ58sILWFt1dTJlOV0SIxTCPuP1Yqzr6nDOhXCAldxMFVghZBfbAwcwR4gQyc0Rh8bmD+GwrAkpBDCLuywvsRSw6BzL3ByifJqbYRhmKy4XshmqqlC31Dj3e3uBXeedF5sZ4PeDQNQ0EIg57HSZkQSD0A+dTqyd887LztnL9dICAeggmegi6UgwCP1qclKWZWlokI5VjlAcG8MzYtxms6qsTJKFVVWyFlkuOsiyLAaRODqKchoNDcikSeO4BSxahnLmDGwbY7fwiQnoHZWVSEE2OgVHR0HcrVoFvSl6Tx0cJPrWt6ATfOhDkc1Zkonfj/XicmHeVlXJlOVomZ0Fdvh8sGm6uzMLItB16BTj4yDxOKNrwwaMRS4aoiQSTcPYv/46MKS4GE6mnp6sgxjI65V2H69P7sZsrGPIWGQ2x3ZLNj5X7mhfWxv7PIJBmfY9NYUxLS0FDr
a2IvhkGdZ4zncsygspkIhLK3k92BwdWFsL8NU0RBAGAvAQJYt6O3QI6TWdnfDAcCetRx/F8a65Bn9PT+M88/MwPjs6Yo3QqSkoODU1ILtyoUS63ThucTGAMZXNSlURcfOrX2FD3rwZqYvxCpynKpqGzcbhkNcSDeizs9jgmfDkroxG0iJVcTgQaTU2hnHu6sI49PaCMNi1C9FbrCDPzuJ36XqbjF1Jua4QRxxqmnyficPF3pDm5kBUlJVhE8zgfPm+QeU1Fo2OEn3/+yB47r4b7/X3w4DbsEGmEp44gWjF9euRHmyxyGYE3BW5vT22JqiuI036v/4LUdP/639hfcXDmsFBRCgODxPdcw88+sb5ZCQQq6slyTQ0RPS1ryFycudOFCNvbsb3nM70OjDrOtbxww8DQ7u6QB4a60GmK0JAUfV6ZQ2eaIJsbAw1U48ckU2X9u2Lb3gsJIqCNel247ccmcDRfsb6iIkkXSJRCEQ6MHE4PS2jWHfvhsPLqOwLgf0uOmW5quqcNm0qYNE5lEAA86e0FOs4271LVaFTuN1E73pXZFr08DDW3Lp1sd2Mg0EQiKEQ0p8zbWyQK5mZgVNS14EHq1dndzyPB7hIFN/QzaWEw4jSstuxxltbobdyLURu4DI+LqOQzWaQpK2tMjU5Gi8DAdxDeXlum5foOvQoszl3utPgIBz1LS1ogJjicQtYtMxkbAz6TlubtE1mZ4kefBB6/X33Rc7FoSG8GhuxD0YTbIcOIeOqpoboox9deF1zBsbsLOZ/opRlFkWBXTk6ir21pyf9MkpcozAYxHlHRmTq8MaN2ZdSWEj8fpmyHAgAr7q7EfWo6zLVmTs7Z0Jiaposi1BaKpvREeF40d2SU1m//IwaG7EPcWMUxt3KSuioq1YB28Jh2aG5qEimPC8TQjHfsSgvpEAiLq3k7WCHw9hYSkpk04HeXpBQmzcnLgRLBMPspz8FuFx8MQzc8nKk/oVC8HSWlMguTqEQNp7W1lhFa24OvysvR12yXNTAmZ3FfZSXY3NJxVB95RVEN/X3Q1m88UZEQ2XjCXa7oZQqCkC6qUlei6ZhIxwYwPesVhgS8UjWVMTjQXoOpwn09OA58vXPzIAc4BSkiy8GUcwbV2lpekYKExIOB+YSd/Ni0rCsbOlD4D0ejHdRERSsQtpOfojPBy+4xYI6iGVlUDinpiIJwb4+kICtragtWFyMuXfiBLDGZsP6ifYIB4NEX/wioupuuAHRgZWVsVgjBDDgq1+FsvuZz4BEMAp33OTmG6WluIZf/AKRRuXlRPffj2hKk0nW1CsqSq0DsxBQuP/4R+BDQwNwMVsyIxyG4s9dSaurI7Ho6FHgw9AQ1s3OnSAPuRxCOqKqsqA6kTTAo4nYXBGJ7Pw6cAA47nTiXNu2wWmyc2fsvqNpklDllGVO6V4GqTsFLDpHwuR9IIC5k4so1KeegvF5002RDeqmpxHV0tQU2/U5HEbpAo+H6C1vyT7KJRsRAtc5NIQ10tOTXUq1okBv4Jqo9fW5zU4wiqqCdBkdxZiWlwP7PR5cA5dZcbtlreiyMuCHy4X7jVd6xygej0ylzLB5SVxZDCKxtxeR7evWQb9NAesKWLSMZG4Oex1HFBJhv/vBDzBf7r03kqDr78fcb26OrW0vBNLcf/tbOCc/8pHkRD6vW256VFIiHYOJ5tH4OBwP4TAwbsOG1OcyN3IMBmWtP7sd+iI3lmxuXtyyB1NTuP6hIYxvezvspmiHjzHVmUiSiYnulaONjVGGqiq7v/PYGonDTMTpxPVPTwNjLRboQpyqHK8MBUc/ckMWItm92WpdPKxOQfIdi/JCCiTi0kpeDjZ3kdI0KE1FRfh7fBx/J/NEKQrRj34Exezaa1GPo6oKisnoKFJ/6+pg1KsqNhefDwpadGSdy4VNrKgIhnK2Chintno8AMrGxuQbDNcVefRRRASYTKj3dc01skN1JqIoGEu3G0qysYOwzwficHhYEhHcZTkTRTEQQNoVdzjbsgXPJFG6JEcajY/LFMU1a6QSnMw40HWcz+eT6czc9auuTnZdO5cSCOAeibDRp2Hs5PsGlbdY9MMfgjD74AcxF6en8XdTk1yHw8NIz29oQL2ekhLMv95epDw3NUFJjk4rm5sj+vu/BwF5//1Y21yT0yh+P2ofPvEEMOCBB2KPxYX/NU2mJB87BtJxbAxG2Qc/KAkrTo3llOGF1kZ/P9KWh4aAhZddll739HgiBHDI75cKJGODx4Ou7i+9hO/YbCAOL7ooM/JE16WRoesgTVetSqx0cqMVs3nhsYkmEkMhEJ8HDiCSwufDfW3fjojD7dvjr/1llLKcTApYdI7kxAlkBOQiPY0I+PDMM1jHxnqqLhfIgJoakAHRxe2few5YkymRnysJBLDOXC5gsbGxVbrCWOR2437r6nJLuhlF07A3HDuGcWQHJzsWKyoiOyWbzcAFTlP2+6FLTkxAN1u7Nvn55udlN9hcNoPhaCSLJXfOjWPHgPkbN4KgXgB7C1i0TMTrha7P0XxmM977wQ8wX++9V2IF1y2126EHd3XFYsxPfgLH6q5dcKwmIqp8PlkHVAjYCTZb8jqogQCaLk1NQQ/o6UmtHBSTccGgJLC42YfTibXV1oZ7Wixnn6rCRjt2DPdttQL3tmxZOFqaU63DYYyVxSIbukTXMmSxWCIjDIXAmHMmVzrCWTIccciZYUTQrbu70zumrktCka+ZIy4zaa6ZpeQ7FuWFnDuOuCB5IxMTAIX2dhh4U1MglZqbFw5lf/xxKGdXXikLap88CaN/xw5sFJwyUlmJCLja2lgC0evFsUwmkJHZKpOqinsIBnGuZJGURNhg//hHkBMWC7xLW7bgHrJJq3E4AN66LsPETSZsgv39+MxkAqCvX59+WD9LOIxN7tgxbBKbNkV2aEskra1Et9+OTfL550FaNDWBeDSZYj1NmibTlINBuTFyI4mSEmyamobPzjWJWFYGLztHHzQ35zbNqCC5lSefxFy89VasCZcLWFJTI5sZTEwgyq+2FhGITILZ7TDE6+oQdRsdSdvXBwLR5yP6/OexvouKYtdIXx/Sl+12lC+4++5YBdVIINbWyi7Sf/wjnBVf+pKMWjQ2XCkvXxhP7HYcp7cX8/fKK4FDXBw8UwkGofhrmkzRNZkwvs8/j7o+jB233YbUvUzWrxCyMLem4TyJUsWNYjbLVKCFHChsMB0+jGjD117D+FZUYNx3705cw5c7Qns8wE2zWTZKOYcpywVZZmK3A2viRTNnIpOTiCZcu1bWhyaCgW3sNm9cc7pO9OKL0CP27Dm3BOL0NPQLIqyteDVmU5VQCPekKFizyaKXMhG/X9be7u9H5GQggHMx6cCdkuvr4+OqpuEYVitwu7kZxx0dBaYlq3tYWyujrzmCKBdisQC/uERMLvSrrVuBg4cO4V737Mn+mAVZXAmFsBaLi6HHmM2Y3z/+MfbFe+6JJBB7e4E/bW2xJVB8PqLvfAffuekmvKLnla7LlOVgEPPQZsMr2b4uBJygp07h/1u2yAZFiYS7HweDkvCyWkGiT
U3JkiQdHbifxYqE83rhROrtlQ6BffsQPZnqObl7s8mE43HZGyFkdGJJidQ9iovj6z4chckpxcmEy1JNTeGZc1mWhgaQx01NMjuEo7FTFbMZz6G0VHaqDodlhOg5JBQLskhSIBELklQcDhhTjY0AE64nVle3cO2/11+HUrxlC6JkyssBXIcPg5BsbQWRZ7WCBOC6fNH1KgIBEIiqigjEbBqWEAE07XYA75o1yT1kY2OIPOzthYK5dSvGoq0teSfqVK/B58NxW1pwrMFBKLVeLzaPTZtkykwmomkgbV9/Hefs6EDkVLpj2NmJ3548CaPliScwBy6+GB7qQEASh0TYzDhSMVoBLynB94PB+I1Wllq4mzUbheFwZvUlC7K4cvw4Im5275bd3QcGMMfWr8c8mp0l+tnP8N673y0VIKcTHZrLykAiRc//F18k+sIXYPx9/et4/kwesQiBBkpf+xp+/41vgIiPFk0DScbRda+8gvTr+XnUKbz7brmeuZOnqsavOWiU2VmUcjhyBN+77DIZ2c3RMZmIpoE8DAZllLDJBJx+/nkYxSUlMB737o2tH5mOsKHB5EBDQ3qROGazjLaJd78uF9JLDxxAZIOq4n4uvxzETHd3YsyOTlnmsVgmKcsFWUbidkMn4JII2UoggCyLyko4SXlPVBQYqlyn02icCgFMm5pCNHB0ytxSCXe6HxkBhm3blnn6Mpd/4KZKjY3ZR+oZ0yq5Iyw7TGZnsc7b2oClnZ2pd4622aArzMzAmc770BtvQE/asSMxnptMwGwupbNqVe5SkIuKgHuqiv/nQr/asUMSU1YrdMiCLE/RNBkscMEFeF7hMCIJZ2eRmcEOVyEwV6en4Uw3lk8gwvvf+AbWzH33Qd83iqLILr2aJptt1NYuvGd6PLBL5uehByTDDSMRxVkGTK5xbdKxMdxPczP0+cVquDQxgfEdHsbfa9fCxk2lzqIQsRGGTIQS4X6MNaCNdQaTEZPl5VjvPh/GIxpLNA04NTmJZ6oo+E5jI8arsTHy+DxnPB7pJElXTCZJgvJ9h0KSAOYgFCYUz7UdWJDMpJDOvLSSV4Pt90MxrKrCxuD3Q0EqKYHClUzpmZyEMV5bS/TXfw1DzO8HIWe1wqDz+QBOdXXwRlksUOKMxw2FYDh7PERvfWv25I7Ph03AbMY9JYrcmZ2FUv/aa7jGnTulMrtli9yE0xUhAObT07iG1asBoJyyzEZvZyfOkanxquuImDpyBPfc0oJ7yDSS0SiaJhvlTE1B8bj0Ukk0G9OAkl1fICA7Hi4HEQL343Rizq9enXT8833Lyyssmpkh+va34SV9//ul8isEjGurFcroj36E799zj4wuNjohrr46MgJRCKJf/xrH3rABEYLFxTINh5+/z0f0z/9M9PTTIDH/8R/jRy8bCURdJ/re95AC1NlJ9IlPgHBnURTZybO2NvGacbsRgfnyy1D0LrkE+FtUBOdLNpHQfj+OTwQFVtNkyrLXizW9dy9IimyiHD0eYCrXNWtoyDyaPLo+4swMGlscPCgjGpqaZGMUTs1KtJZDIZmyTATDv6pq+eBSClLAoiWUcBhzzWzG/Mo2okLX0WV+YgKdmFnH0XU4TrxeOCyN61wIOCeGh0EUbNiQ3TVkKoEAiAC3G47hjRsz11n8fhm9XVUFTEzXsOSIKCYLHQ6Jb0SyAZLPh2M3NoL4y1SvVBQ8N6sVmGMyyXrTZWUg25LpyaoKXLRYZBZKLmQxOjYTwYnX2wsy6fzz436lgEXnULhG8vw8MKOuDvPgpz+FjfWudyGDgAhr5cQJzL/162NLMp05Q/Rv/4Y59OEPR2KMz4ff8dw55oEAACAASURBVNqqrsb8TWVP13Ucu68PczNezUAhJOEUCsn9vqQE+zITU+PjsFFVFWt43brFKTWiqrheLndQWopx3Lw5udMhuo5hvLRkY7fk6CjzeKnOiQg3XZelH6qrZVr35CR0JHaMNjXJrLdk2MS2ajiM3+QqcpAJRY5S5Iw0HoscdsvOdyzKCymQiEsreTPYqoqoOIsFwMzF9HUdHqNkBqXbDaPc7Ybh3NCA3z3xBJQ6LvpfUwNFcWgI5+vsjDSkFQXG//w8jP9su/s5nQDVkhJsWvGUK7cb13ngAD5/y1vw3bExbBY7dqTuqY4WLvQbDALkLRZsgFNTktRcv37h1OqFZGgIJJ/LhbHfuTP7sSPChsapyrwJHDmCjbWiAmOzZ0/qpIaq4pi8eSwXcTgwT7g+ZQIlPN83qLzBolAI6TR+vyzm3dsLA/a880BaezxI1QkGEenHRiF3LHW7kfZrNBY1jeib34QBv28f0T/8gyxYXVUln/vp00hfnpgg+sAHiO68M76hrKrAKiGAHz/8IY53111IvzbOo1AI69NsBgbGm2OBAK79uedwrXv24MVReInS7FIRVcX5ucnR/DxIyqNHcf3d3TI1JxuFzu+HIhoIYI03NOSmq+rYGIjOV1/FPkUEEmP3bjio2tvldQshFVXjez4fCBpOWeYU7nNYCDxTKWDREokQ2PNcLuyruZjLL7wA0unqq2GUspw+jbWzaVNsuvSRIzBqt2yRDROWWqamQFgQyQyNTIQdL34/MKK+PnV9gOuvcZQhk5BEstkA1zG0WoEbDgc+W7cuN40WmFAxpjDPzEQ2tEhGrHLqdklJ8hTodGUxiEQh0LF5cBCOYyakDFLAonMo/f2wMbq6EBmnaWgud+oUsiB6evA9bi7mcGCPjybxXnoJDtlVq9CBubFRRglzJK/FIlP+UyWYHA44Hbxe6NZbtsi1zqQZv7j+MROHTC6xs394GN/jrLhM7bJk4vGAaD11CnqCzYZrXr8+dk1pWmyUIdMrHHVnrGWYqrPFmBbMZQpKSuIfw+fDHOAmUELgu9wYxWZLD+80DWNtMkU2+sylGAlFjjI1EopZnDPfsSgvpEAiLq3kxWALAXIrFIKiVVSEDcfng7KYDKw9HqJf/hKg+573SG/lwYOIHDrvPGxMq1aBSBseBtitWxcZMq1piL6ZmiK64grZETpTmZmBgllRET/CLBBAlNHzzwPI9uyBEd3Xh9+1tSVPhUsmug5v0Nwc/lZVXA8Xw+3owP1nG/kyMQGjemYGBO2FF8amJ6QrwaAkDrlwcWkpxrG8HOM4NobzDg3h7+3bYcincj/coay0NHfpPLkQrxdeTosFyk4csibfN6i8waJf/AL4c++9cDT092NNdnWBgONaPy4XCDtOKwkEEK0zMYH5aEw79PuJPvtZfH777SAHufNmRYUsWP2b3yAVua4O39+2Lf51MoE4Pk70H/8BxbOnh+hjH4tV0LneXnFx/LQfRUEzoz//GfewYwdSHLmuEXcozUS5MpJnXBD81VdheJSVYZz27s0+YjkYlBhXVAS8T6VZTLLrHhiQEYd2O97r6kJUzO7dyWuwMZGoafL+dR3PgLss53E6Tf5eOSQvsIgI+sDwMHSBXDjm+vuRnXH++Ui5ZxkZQRmBtWtjsx6OHYMutXGjJAWWUnQdTpzRUazpbdsyj/7hOmBCyKjqROswHI5MSeaO
zUTYp+vqIusYclRUIADdZGoKWLR2be4bLczPw1FljMYaGsJr7Vror8n0G78f+1d5eW5rMi9Gx2YOCBgdRXOwzs6IjwtYdI5kfBz4xMEIQqCT8uuvE91wg6yzqmnIKHM64aAw4hiXbHnkEdhqH/oQ1szcnCToS0vlfp7qGlIUYNbwMOb4tm0yuCQYlMQhUWRdvWhnwuwsCOxAAFjR0RFb2zoXYrcDZ0dGZH3FLVukjmGMpmPi0JiWbCQLi4tzR+Jz4AVHNBYXYwxnZ2FjOhzSKdrejuvORu8iwvmmp4Hxuaj9m0w4cpMJUyKMXSLSdAHJdyzKCymQiEsreTHYU1PYMFpaZNTP3Bw2lWSeUpcLXvVnn0W9ruuvx/sDA3ivsRHKd1MTNpLxcXke40ag64jAGR1FJGCUkpKW6DpIBJ8PSmZ02ko4DIP96aexmW3fTnTddfgdNxI4//zU6l3EE48HG5LTKbueCoFxXL8ex81WmZ2bk0RARQXuoasrc5LBSByy56usTKYqRx+Xi/ByAfgTJwD4F10EAmQhLyV3bi4tXV61x4JBEKS6jjkalaqR7xtUXmDR/v0oK3DddYh8GBuTBcCbmqDg/OQnUHLuvFN2xfT7Zd2azZuhALIiNTWFBiojI0Qf/zjRjTfiWQcCmOelpTBuv/xl4NYllyBKMZFxpyggzB55hOh3v8MxPvCByNpmLG63JAKrq2PTVw4ehIHmcuG63/Y2YJbDgXWWTfoyp0/PziLi8I03cC3NzXCYXHhh9hHB4bBMc+Li6lxjMV1hsuLAAYzL7CzwYcsWGEQXXSSfyUIGcjAILOY0xooKOMPyKGU5mRSwaAlkehprpqUlbgRW2uJwIErIZkOUEM/hmRlEITY2xqYp9/Zi7XZ0yMZMSyl+P/QijwcOykz1DK5TyOUN6utjG7Q5nZGkodcrP6+pkRGG9fXxCY1wGPg/Po4139oqmwPmWjg6KhyWJWp0HfrQ3BzGqbExOU653cCn6urcdqFmIjGXHZtVFfvy9DT2OUMqbAGLzoE4HNB3bDYZmfzoo9g3r7oKdhQRntvRo1i/mzdHRg8rCrInXnkF+sDb34416PHIFFmbLf25OTkJ3OSa7Bs2yPp44TC+w2WNSkvj2wtOJ8hDjwc2yLp1uSe0FAVp1seP43xlZTJl2WqNjDLkgAoi4ImRNMxVHdJk4nZDfx0bkxktNhv2Jg6SCYdl+YZsxePBmNTUZN+TIFXRNEko8ngba0Sm4BTJdyzKCymQiAuIyWSqEEL4cnS4ZT/YLhdIt/p6bDAMVOy9TSROJ8D3qaegrN19t0yT+93vsODf8haQZlar7A7V0BC5kQmBaMCBAUSYZKOsqyqItVAI5zASlZqGzfLxxwHImzeD9Fy9Gor6wADAcvv2zBQ67v7c14dN1GzGptTaCvIwF94ztxtpy4OD8NT09OA+0vU4c31Cvx//crokE4dlZQsrn1xbraoKx3jhBURZlJcjqjNZExoh8BsmK5eTqCrmfzAI0sqQar7kG9SbDYsGB4kefBBz+o47gBnDw8CMtWvxbH7+czgb3vlOaXD7fFjDY2NYb+efLxWpkyeJPvUpKIOf/zxI7nAYv7FasdZPnCB64AEYSB/6EDoRJ5r/4TBw5HvfA25edhnqB0WXJBBCdt5jAsv42RtvQOmfmYGCfMMNcJ54vZFKYiYknxBYm8ePw9kwMCBrEe3bJ5vSZCNc28vlwrE4KihdLFIUGEMHD2Jc3W48u54e2VAnujacsT5i9H37fFCAFUWmLFdUrLhC3gUsWmTx+TAfKypAtmdLxoTDIBCDQURCMx643Zj/1dWRjg8irNtDh0DY7N699PN3chIYYjYDOzKpIygE1iPjRG0txtTrjYwwdDplaltZWWSEYV1dcsNYVbEnsANw9Wpg6mKXTNE07AEmkzTkg0HsOZoGAjOaLI2W+XnZ6TWXDg5uRpVLIjEchuPM6YSz62xEWwGLllh8PtRuLyvDPmmxIIvhueeQVXDNNZiTigIHgN8PotFIwnk8qH/Y1wdSuKcHz7eoSBL16ZJRwSCwbGICdsCmTbhGjqIrKpLEYaI14fVCD5yfh42zdq2sPZorcblkl2VFwb1u2gR7V9cj05LN5tg6hksV+OByAYM5KIYIpN6qVcDR0tLIZiWcbZFOxGgymZvD3Em3GV4uJB6haLEs2HTmnGt4OcamZSl5SyKaTKa3E9HviOg2IcQvoz7rIKIBIvqyEOJTZ997PxF9mIjOIyKFiJ4jor8XQhwz/O6zRPQAEV1IRB8goluIqIGINhPRSSL6RyHEl6LOVUFEk0T0eyHEXQtc9rIe7GAQRnpZGRTVmRlsKk1NMDQTicMhU81CIXRFbWzEgv/lLwF+114LL5TFAkWZU2GiU3VeeglgfuGFCQs3p3wv4+NSiWQiUAhspI89BqPXaLAHAqg15HRC4Vuolk0i4Q7UIyPY+Hj81q7NrjkBi98PpaG3F+O5dSte6SjJui6jDTkS0GLBs6+okBtSOuJ0Yty5Rsr4OAhhux3Peu9ebM7xjqtp+C2Hri8n0XXci9cL5b6piYgMG1QBi3IvLheU2vJyEHmhELzE1dWI6hACRnhfH9E73gGDmwjK8JkzwKSaGqT8cbTaX/6C6EKbDf8yEenxYN5VVhI99BDqLzY0EH3uc/K4ia7xu9+FI6KpCbWDorsXEiXvwHz6NMjD0VFEBF5/PXBHCNwDdzCvq8sMizweGBMvvwxFvLYWpP4ll2Rfe5XvjaOFiHB8my29aJ9gEHh24ABwkyNCd+xAxOH27cmV1mgikZ+pz4f3rVY8W05ZjlcjMc+lgEWLKOxwVBTMx1zsT489BifbX/2V1IGCQegmxcVI9zOuodFRrOHmZuyjSxmxzxF1Y2NY39u2ZWZEhsMwRD0ema7G0YZGYsEYYWizpe5Y1HXoGiMjOF5jI3TOpXRMhkIw9MvKpHN8bg7XVFoqidBE+CgEvq+q6dWaS0VUFcfPZaRUMEj08MPA2htvJFq1qoBFSylcl5wIDZZKSpC98eSTiFS+8UY861AI2BIMwlYwZpSNjxP967/C3rvxRgRucOpqJqmwXA6LMx24JAMTcEwcJnMwcgmCmRnZPT0XWVvGaxwbwzUODeHvtjbYgVzKhZt9GKMMl7LkEuuA/z97Xx7eVnmlf65k2fK+b/ESO4ntxImdlSRA0rAEKGmhZSuFgVKGoUxLaaftdNpOf2UoM+10psPWZZjSli60pVC6wZACgYY1ISEkJCEbiePdljfZ2vf7/f54c+a7kiVblq4XBZ/n0WNblq6u7v2+95zzns1igU/JiRbFxdAD5eXhOBxZ6mww4O+MjInbREzlfAYGgNvl5bPXO1pVJaGo/a7aSc9nJOY3TlFsmpOSyiRiOuHivyKEuCrif18non8johVCiCOKojxIRJ8jot8S0etElE9EdxJRHhGdI4Q4ceZ99xAWwZEzx/4TERUJIe5VFGXXmd+XRnzWJ4joF0R0iRDixUlOe85e7FBIgmldHYyCo0fh9DY3xwYg7lHR3w+HfssWEICqit4a7e0
gEJctkz292tuhpOrqwo/79tsA9ZYWHCNRcTpxPmlpUDw8yevkSaJnn4Xy0DrsigJw5KECLS2J9TsaGoIjfPo0gKyxEd+7slIfo83nw/U5cgTn2dQEwyFeIzkUCicOiXCNuEw5IyO581RV2fdRO2mwvR2GzdAQyJlNm8L707FwqUCEIpgzMjAAZy4YJNq4McxYTjksev31uYtFwSAyHKxWEIRZWQhumEwg9w0GkNPt7SDtmprwPpcLezsYlJl7ZWXYKzt2wNmprye6/XYYVaoqy1uJiB57DPurtRWZ1NoerZFy6BBITIcDGdYf+lB0x5oJLSIQWbyuBwZACnR341w2bJCTTQMBEJRCyN6jU5XRUWQdHj2K41VWgoxrbNTH+NNmFKkqzjPWgJho4nYjCHLsGHA5FAKOLVsGTF60aGrnyb2VvF5pWGZkxJ4Un+pEYigE3dvXR3TXXamNRceOzV0sIgLRb7ViberRr+7oUZDla9bI0sNgEJ8TCMCJ1xKVQ0N4fWHh5NN+9Rbep243MnMWLpx6Y/6xMZB7g4OwzZhMUBRgHw/Yy88HRiZCWgwP4zP8fhmc1rMkeCricuE75+XJrOneXgTPCwqAR3l5sW0cVQV+E+H1et7v6SAS3W5UIDmdRP/6r6mNRTSHfbRICYVADHo8yBzMyYHOf+YZ+DBXXy2zYQ8exN5oaQmvgnrrLQRrVRXtYJYvh92UiM1BBJx8+23YNwUFwDfOksvImHwtcwsCrt6qqppwuOGURAis1aNHZU/I9HRUsCxZIvekNstwpoV9qP5+2R7BYIDfVFEBe3ayZBEm2bjPJPePTLStjFaCQdlbtqxs9m0n7dAZzlIcHYXO3Lx5QhIxFbFpTkrqzSE8I0IIv6IoTxHRJxRFKRBCjGn+fQMRHTyzADYQ0eeJ6PNCiO/xCxRF+SURHSWibxLRxyMO30NEl4twhvVnRPSIoigbhBB7NM/fQkTdRPRX3b7cLEh/PzZgbS0M2RMn4NTFyhwjglHY2YmN3NEBB33NGtnTsK0NhBFn8wQCiFBxZEl73MOH8WhqSo5AZAAxm6GAeALys8+C5CwshLJcvRrgzFH29nYYn6tXT02BCgED8cABkIdGIxT1mjX6NcgOBqH4Dh0CUC5ePL6sb6L3ulxQnty42GTCuTFxqJcYDDiu1QqDmb8/D445cQJlzn/8IwyDzZvDyVqTSaat69kIPFHx+2GA2e2ycTqXXGkzzuaxSF95801gy9atWOOdnVgP1dVYE7t3Y7+uXSsJRIdDDsLhqGRJCdb/E0/gmOvWYe+bTLKEngjY97Of4RjXXYcBB7Ewz+Eg+v3vYSiXl8thL9GEp5mzs2w0yinIbW3AqM2bgY9sJLvdcMZ4ev1UDFkhQEq+/TaOrygwkHk6ux4GnxCyxJqbrBcUxIcjDgdIw6NHcf+EgOG+bh2cjcmGD0QTJg95Yjz3O8zKmvhY2inN2r/nonA5+sAA1vjAAPYHl/XcdZf2tfNYpKf09QHvFy7UR59bLLAVeFAbEe5vezvwoqEhfC9ZrcjSzc2FbTKTOnFwEDhiMOBcJ8tc1mLD2Bh+Wq2wP3hwSm0tjlNQoM8kdKtVthvJyQEWz1TPrliSnQ3bgVsxmM0gAFgfmEy4Nnl50QkBgwH/4z7aBQX64ZPRCNwIhZK/9n6/vNclJfABtDKPRdMnQsBvcTrlsMvDhxF8bWxE8JWTNt55B/d75UqsK56G/txzRH/+M8igz30u+sTheM7D75fBhlOnsMZWrgQxl5ERX/YgtyDggWmVlcCKZFoQBIOyj+HgIOyOtjY8X14OO4/PkSc/z4bwoE2LRep1JukqKkAgTuW+8GCajAwZULXZcNzc3PjvSTThTPHhYawhPafJTySqigdXnfDvTifWTE+PfHAv/82bYx9vHpv0k5QlEc/Ib4jodiK6hoh+SkSkKEorES0non8685qPE5GfiH6vKIq2FauPiN4kooujHPd/IhYAEdETRPQg4abvOfNZtUR0IRF9Wwih6vGFZkOGh7EZKyoAEocPA1CXLo0NXv392LAc/crIQMZhKAQj+eBBZPRs2IDXh0KScIxsbn38OBzfRYuilwPGI0IAgG02AGVFBYB5+3Z8n5wclA6de264w37gAN5TV4fvGy+4+nwwmo4fB4gZDFCc55yjXwRcVZGlc+AAzrW6Gg73ZMDNSp2nzRJBSXL/oemMsHHpoNOJNcHZWbyeGhrQJ2X3bvSzW7wYRDOXEGRkyGltmZkzp9iDQZnJ5PViTbjd+F1VcV4FBSBEY0wnTyks2rRpslfMjhw4ALLpxhuBJydOwPBtasLafeklrI0bb8TUdiI4ku3tcpK8xwNDOhAg+sY3sEc//3miW26R64lL6p59FgRiRQX6GsbqwSoEsi0eeQTv/fSniT75ydhGLk8ATk+H8+xwYGDK3r3Yf3/7t8jaZsJAVRMvX/b5kFHw+uvA5IwMXJ+LL05+yrJWHA5gqt+PcywtnTzgMjCA77xnDzKtiOAg/N3foTR00aLE9ni0kmU2kBUlfrJlLmYkulxwqLq6oDO7ukAmCCGnLm7ejPUeg8BOKSxatmyyV8yOjI7CzjnnnORaq7A4nQiiLV2KPquMHSdPYp+ee254n8HRUWBXUxMc3plq8xEK4XN9Pnzv1tbon+3xyB6G/ND2q+LetSUlWKd6Th0eHUXQNhCATl60aPonh05FhMDaCYWAd2lpOE/utZ2djf8VFsbWIT4frqnZrE/rCe25BYPAu6mSRl4v7jkPujEaYZeuXCkHKUZISmFRqkh7O+7B4sXwB06cIPrDH7DfPvYx3BeXC36YEKhY4hZDIyPSFlmzBrbRVBMnfD5pp4+OgqDz+bAP16yJv90BtyDgCp+yMui1qbZL4N6FkdOSe3txbYaGsM+WL8daPdOSaNYkEIBtZLHg3NiGqayELVpSknzpNvdHLCvDPXK58PD5wjMupyqZmcAwux16IRl/l0nBSHIw8nciec36+oCt/f2yt66i4HuuXYvkodrauD5+Hpt0kFQnEV8lsL430plFcOZ3QUSPn/m7iYjSz7wuqiiKYoi4iW2RrxFC2BVF+T0RXa8oyj8IIfxEdDOh7v4XyX6R2RKnEyQiT106cgRAvHx5bCDv7cVGLiqCgzMygkleRqOcpllXJ5187j/h94/vC9jWhsycmhoQG4lO8ezrA+nD/WZ+9zsoyfR0ossuC3fYiQDehw7h9zVrANzxyNgYzrmrC8BsMiES2Nqqz7AUIpnZ+fbbAOqyMjgRE50jR7ldLmnI89TDrKyZ7V+RkyOj1CZTuEPPUcrmZpRovfUW0S9+gb/POw9rMCMDDgpPbtRbVFUaQR4PfnI/Ea8XxofJBGVZWYn7mpk5KTExj0VJSn8/hjDV14NA7OjAel68GIbKG2+AfF63TmLL8DBex2VxFgucmuFhTGC2WDBZeetW+TluNzDroYew/i68kOgrX4lJDlN/P167bx/O5d57seejYRVnjXm9smH49u0ovxYC/cy2bg3/LHYWVRVrLdZ5RMrQEIjDt94CjpeUEF15Jc
gIPbNxXC58lteLvVldHfscORuSJyp3duL5+nqi669HUKmqKnHCzuMBeej14hhZWbj37IhrDdB4jHBtj0T+eyaFh391dcnH8LA0oJmAqamRvaXiyCSYx6IkxedDsCsrS2YMJiOhEPogBoMgW3i99vQg+FlTE04g2u3oZZqRgXYJM0UgOp2wi5xOrDseuhQMhk9KtlplJrfBANyqqwMZqs2AycsbP4U+GXE4QMTxoIWlS/UftKCHsFPb3w/srKiADisrw/1mfOY+tdHub0YGdJrNhvWgF6ZzkCUUklObJxLWlyMjcqBDTg7wiO3LCWQei3QWTuBYsAC6tKMDrVUqK2WlhdMJApEI+3hoCM8FArBHuB/r9dfHpye1NrPfL/VsZyfOJycHtk285JwQsM06O3G8oiLYCPEQUkJIopCJw1Ao/Fw7OpAV6XZjD114IbBipgeCaMXrlcThyAi+h9kMwquiAtdgunCMs5k5ISIQwF42GBLLxMzPx3VnHzhWyxi2xWKRg9G66TEpaLXCr7dY8HNwUAZ8OaGjuhqPysqEslbnsUkHSWkSUQihKoryWyL6oqIolYQ69I8T0atCCL7pChG5iegjEx0q4m9PjNc9SrjxHyaiPxDRJ4holxDiZIJfYVYlEIAC4OEfp07BSGtsjF0q292NTV1SIoeUrF4NA7irC45jURFIACauLBYosAULwpVEdzec4IoKkGSJRF4CAThhgQDOefduHFMIZGtcfHG4w6uqKKnr7AQQrVo1eRSOScq2NgCb3w/wb26G8V9RoV+ZUW8vyMPhYUSft26NHlXhaCCXKodCAFezWZYqz2Y5cH4+vsPYWPRsKJMJhMLKlVgzBw6AgOYprBkZ+H7cIzFR4XILbZYhl3VHKjnuVcQ9IuOZSi0/Zx6LkhGPh+jXv8Z1v+EG4JLVCgOhsBBE2c6dyIy57DK8Z3AQmJOXB2P61Cmsu74+ov/3/7Af7r8/PIvI6wUZ+N3vApO+9CUY07GG/fzhD+iVKAQyGT/84djlZaqK9R4IYC3t20f0179iva1di/OOzCK22/Hg8pXJIsNcxvTaa9gvRMjuXbtW9vXRa997vXA+XC6cV2Vl9GwiIXDtmTi0WHB9mppwzdavT2ySKwv3rnQ4QGZwqXdOzvjvysNV2ECNxzCe6usTFSFwPbWEYV+fdICys6FPWlpkL6jsbKyNqUyTnsei5ERVUb0QCiHAqMd+eu017IvLL5cYoJ02r9XxLhfRK69gXW7ZMnODQXp7YRsZDHKQ3r59wGGbTb4uJwdYxcNPuG8fE40cbEhkomsscbtBDAwO4phLlug7aGE6JC0NdvLgIK4L9+jlnt2LFuFeM5EYjeDIypItabh/tR5iMEw82d7hkMSh14vn8vLguDNRHI/MY5G+MjoKXVtUhD3a2wu7qaiI6KabcF/sdlm9tGABcIaD4r//PXTOzTcTXXTRxJ+l7TPMFU08fNFmg/3BVR8TVa1FytAQ9rLHgzU1Wa9ZLkvWZhmyGI34btnZWLPvvSdLlisrEYBZuHD2cMLtBu5bLLLPaXY27l15uX6JJ5OJogC37XY54I/7CHIiBQ8mmUzfcdCVbe3eXjlESutTRRODQeJNWpr82+UCJnKCUm+v9NMyMmAPLV0Km6iqKr5WXpPJPDbpIylNIp6R3xDRPxJu/l4iWkhE2gk4p4jog0R0WAgxkORnvUKY2nOLoij9RNRIRP+V5DFnRVQV0SwibMzeXiib2trYZSGdnTCIysrwml/9Cgbw8uWIsBw7BgDaskUSdxy1LikJL8no78e01OJiEH2JGOoeDwDH70d0etcuAM+6dXDYI0tAXC4oV7sdxlBT08TKxeuFsmtvl5lFXMKXkwMw06t0eWgIBjtH9T7wAZkFwMJ93LhUmQ3AzExJfM0Vo5od/bExKPdYoG8247uuXg0C+MABOHDr1sGZDgSkwolHgkGpFPnB0S6jEUrSbJakgcGAB1/DJEuo57EoARECmcN2O4aeeL3YByUlIOgPHcIE5KYmoiuuwP2xWOS00Pp6GI9paSDY7r8fBuR3vgMjmsXnI3r0URjdNTV4XUND9HM6dYrogQfwc+1aGOhVVbGN3WBQEojHjaNeLQAAIABJREFUj4MEcDiAjdu2jc8i5r5EXi/W3WRNrz0eWbI8MoL9tGkTMiLz8nBeekXZ/X7gkcOBPVNWNv78QiHgPROHo6NyUvyVV6IENNnyxUBAliwLIVsKTLZHOZLN2YhTIRLjzWCMRxyOcMKwq0s65JzRuWkT1mplpcw0YtIwyeEH81iUoJw8CSe5pUUf/X78OHTamjUgv4jkFHmeNs/i8QA7VBXZMzMxHMRuxz4+dQp/FxbCFiSSZGBNjSQNowX17HZZWlZUFH829WTi88lsJ4MB2Y56DVqYCcnMlNmEGRm4LjU1uPe9vdBdY2N45OdHJ4zz8qBfbDZgrF5ZqWxzh0JyUBZnmvr9uJf5+dB7SU6KnsciHYSHgmRlgXgbGkKAMztbDoKzWKQf1NAAO6G4GOvrBz8Avnz2s6iciiah0PgBZWlp+Ay2m48cwdplGyTeUvvRUfhSTieOFzklmkgOBdFmGWqrBLhdkjb7rbMT58TDNJcsgd01U/36IsXhkMSh3Y7n8vNhv1ZU6IeNUxW+jy4X1kFmJq5hKCSHsHi9cvBVWlrsLEKWrCzZz7G0dDw5yP6VNlDh90uisKcHPzlIZTDgGrW2SsJQO6RzGmQem5KUFFHFsUUIcUBRlGOENNTFhPr1pzQveZyIPktE31IU5fbIWnVFUUqFEENxfpZQFOXnRPQNwphvDxE9mfy3mHkZGABoVFdDwXR3w1msrh7/Wi6vHR6Gs1NVheEYfj/IHpsNx7PbkdlXVYX3MZjm5YWnuQ8NobdZXh7RJZckZpw4HACfd99FNqTbDaV0+eXRy377+vBaRcE5c+QkmlitICV7ewGa5eUg9IJB/F1aKgEzWbHZkHnY0QElzdNm2cBT1XDikIkvbbbcXCvlYTGbcX4ul0yZjyW5ucheXbdOlq2+8w7IxeZmKN7I662NlPKDS7kVRZYD8SAN7nvI0wn5+pnNeg2dmMeiRGTnTkS1r7wSJNF77wEbFi7E8zxVmZuF9/XJdgr19bKx/u7dICNXryb65jfDievBQaJ77gFJfemlKF+OltXh8xH98pfIQMzPR6ZiayteG6ucjMs6jhzB2h0bw3ndcgtFnUKuLV8uLJyYKBgYAHG4bx8+p64OQRfOXOJyXj2wKBiU2cMGA4y3oiJ57EAAhO6ePTgf7vm4ejWyDdes0Yf0mKxkOR7RkoLxBqiSIRL9fqxDLWHImQcGA/TmqlW4bwsW4Lpqsx+ZODQa57FoNoVLBWtrJ7YR4pWhIWQjV1WhzQAR1vWxY1jP2j7MPh9KmH0+BGKnY0CI3x/ew7CnB3jr92ONLl0K0qGoCD8n2898PO6TWlSkT+ZmMIg91NODPbJgAfRBMlUJsyUFBfI6cbZPVRVs7qEh2bPMZsN3jaaXCgtB7o2OApf1IFFVVdruVis+22jEZxUX46c+U3HnsShZ8
fvhv3CgzmZDG6C0NBCIqor2QAcPYh9u2IA9YzbjuR//GOvqK18Bia0VbT9wtp9NJtjc3JKFCOv1yBGQTk1NIOvi0ZMOB8jDsTEcr6lJYisThvxTW5bM2ZPcu0+7Fr1e2CJHj8K/yM3Fd25qmrnWDyxC4H7098PfdbvxfFERfJeKipnLJp9IVFUSfGNjuOYmU3jmIFd/sV2SkSGnajNBGEkOZmUBl4QYT5CqKjCOyUJu38E7vLAQupYrLyoqZnYq9jw2JS8pTyKekd8Q0b8S6te3CyFG+R9CiF2KotxPRF8koqWKojxDRGMExvmDRHSIiD45hc/6BRHdQ2jG+bgQwjbxy+ee8OS8khJsci4D5BIWrQgBQs1qxUZfsAAOZEcHnKK0NEkyVlfL0kGPB4CRmSlJRSKAzY4deP7SSxMD/OFhlAe9+SYAb8kSog99CA52pHDGTFcXAGvVquiAzpmZ3G+HG2HX1OBaORx4X3W1Phk/nBV58iQ+a/VqGAc8odjpxGuY9DIaAdBZWfqRXjMheXkwDmw2GKaTGR1FRcg2s1hwj994AyTrxo2IvmpLk7nEggiGBl8bsxnXi6e2cjQwLQ3Ght5TqSNkHoumICdOwMlevRpk3fHjuDeLFwNT/vAHYM511+H+dXfD6SkpgVPJxtvvfgdc2raN6AtfCDc433qL6O67sZ++9jWZzRgpBw6g92F/P4IR3KCcSaxo4nbDeN+5U2LktdfCGY/2GfGUL6sqjOPXXgM2Mz5wz1CPB8/l5+vjVIdC0kElAk6WlOC7ezz4fnv34qfPh+uxdq1sR6DHXlJVYJ7TKUuWuT9kogSpwYDvNhVSMB4iUVVhCPPQk64u4BUb4oWFWJubNuEnl10GAtJJU1XpGE1jVtU8Fk1BHA7gT2FheHZgouL1ov9YZibwxGDA/T96FDq9uVnu/0AA+93lQhsWPbJouL2Cto+hwyH/7/MBP5csgX6tq5tK+w7Zq89oBF7oUWrLwxC6uuQU1fr62e1lpoeUlMj+iNxn2ekEjuTkYM3xNGYhxpO3iiKJRK7sSQQXOQOe8Z57IhYV4filpdPWBmceixIUzv7z+6FvfT4QiIEAWrEMDkp9xL3lzWY5DO7JJ0HU3HWXrA4IBKQdzcQdDydj+5nF5UIm9dAQ1snKlfFl03ELAi6n5iq3UAjPsS4kklVCJpN8RLOfhodxLdracJyqKvRirK2dWZ9ICOwhiwX2KAc8S0pkqfJMkZlcXjxZ/0Ht6wMB7H9OsuCycG7hxCXkXO3GGdDRsEHbA9/jkcGpnh5gHvtpzAUsWwY/OrK92SzKPDYlIWcbiZh75vcwEUJ8SVGUfUR0JxH9PyIyEFEfEb1ORD+aygcJIboURXmJiC4hop8nd9ozLx4PQC8nB49Dh2R0KBKEVRVgPTYme/8NDMDY5UbaeXmInmdlAcwVBeDT1QXAqa2Vxo7djqlgaWkoN55qdEZVkZWzfTvOackS9CiLdu5EMNJ42uvixejdEfk6jwdRso4OKOfcXCjJmhqcb18fXldZCRIsWUXl8yEyeOyYdCRWrsQ14QbWXPLGpFd29sxH1/QSLokZGYGSibf0obiY6IMfBMn68stEv/kNDBh2dsxmrD2zWSq3QADXcHRUKi6TSfaInKEshnksilOsVhi4FRUIApw6hfXS0ADj48knsQ5uuAH3rrNTZm7U1uIeHzlC9N//jX16xx1oFM57VFUxefmnP4Xx8tBDwIBIsdsxdXnHDrzuu9/FGvN4sPdiGcxHjxI9/TSIzcpK9BlavTp2f0WrVZJw0cqXXS6QdW+8IQ28bdtA1hmNOE+PR2J3slikqtKhVFV8XkmJLKncuxf6IRjE/z7wAWQcLl+uH/Hl98uAibZkWc/eX1PNLox8j80WPim5p0f26+HG6BdfDJ1RWwvMVlVcNx7YxMdlR2mG+tXOY1GcEgzCUeZBacnuLSFg6zidRNdcA1tHCARNvF7sIbZ/QiGZwXz++Yn3D3U6Jck0MoLjsfNoNgNL2Wbj5v7NzQj8TkU3er1yEnNODvZrspnQPM24owOYUFwM8nC2Sv/0FoMB95WnsZaXw4nmSewNDbiOHLCOltXDWYJ8j+O1RwOB8DUhBNZ5aSmOwcRSMBh90IFOMo9FCcqJE7I1isFA9KMfwQ+77DJgh8uFva/1JVSV6PHHYTuvXk10221YKzz0jXGBp+uazeP3MCeQnDiB97a2xkfW+Xx4H2cRl5XJyi2HQ5bLms1SH06EH6oKH+3IEXzvtDT4fMuXz1xPQSJJfjJxGAjIKfQVFfH1tJ6qJDOchDMFOYtQW1bM05UNBtz/yHvKhCKXOnO2aFoa1ozJhHXU14f73N0t1ymXm1dWojqFswwna9kzizKPTUmIMn4S9bxMJoqiPEtEK4modoqjuWf1YgeDMNIUBZv66FGAQ2vr+EivqoLAsduRTVFWBhD52c/geF51FRTK668DWD/4QYAEA77fj8bRTH65XHI64eWXT71fVlsblOLp0zC+rr46tsNOJEudeRpwpGE+MoJj9vUBhCsrcb5lZQDH3l4QUrm5+LxkCahgEErw0CFc84YGOCsGAz6HHU1uEjyDpNeMCA9G4MElWtH2YYmMjnJKfVcXMs28XjhCmzdDOfn9stSbe7hwVmJW1rSmxs8JdZiqWBQIwBgeGyP69KflAI+mJvx87DHcv1tuwc/OTuBMRQWwSwgYyP/5n1gr3/gG1gTLyAjKl/ftQwPxr3xlfHkgH+Phh2GEf+xjICx5DebkRI+UWixoUH74MIzYK64AuR2LGGKnWwi8PvKYfX3A0bffBk4sXiz7HXLGj88nCfFk1zQfk7MBONvv4EEQh5wpVVoKAnPDBuCVXr0Cuberw4HvpSi4Jrm507Nftf1PJzNivV5Zltzejp9OJ/5nNEIX1NZCJ/JEXS1pzdmGjF/a/kLTSBzOY1ESwgPirFZk2Cbby5MI+2jPHgyM4+qMU6fgfDY0yHI+VUUPs/5+YEhkqWEs4ZYITChxSTER1lphoSxJ1k7Qtdthg3g8CMLW1cXv2HHQgYd8FBUlnyHIw4ba23FO+fmww/S4B3NRmOjNz4cu8Hhgh+blyRYVnNHDmBgpXi/ug9kcOyirXR/cbywjA+uhuBjHjbzv3PKF2yskKPNYpKO0t4Ogqa+HHvnxj4EhH/kISDS/Hz5dQQFwhqsHfvQj+D9bt8I38/tlVhmXqGZkxNbpNhsw0WaDzdXSEnuv8/BCzjzs7ob+Y1tNW5IcWZY8kbjdyAw/dgy/5+XhOzc2zpxvFAwiy9Niwc9QCOdfXo7vl0zm7mTkYDzDSSLJQf493uCCwyH7TE4kwSDua3u7bCfEPVo5A5PLtquqsF7mQN/aOYFFkZIENs1Jmf3bnGKiKEo9IY31O6m0AISQkyBra0EQckQ8UjmEQvi/wwHlxYNWnn0WDtYVV8DQO3wYCu2882DMCCGzNGprJYHo9SIq7/NBoU3FQLRYkO2zdy8A6oYbQArEAu5QCGRdTw+M3FWr5PcLhfB8WxsAkKf8LVoEI1sIfJ+hIRy/pib5SJeq
IkLzzjtQ7pWVSOc2mcKNu8LCaSe9ZlWys2Fo2O3S2Y5s4EwkFRqXJaenQ0nV1sLBY6LjZz+D0lqzBkax2QzDODNzTiivGZFUxqI//xl7+xOfwJpwOrEP/X5knaanE/3N32BPtLfDIVqwQA5Kef55om99C/vzgQdQPsyydy/RvffC2f3CF2BwR2byDg4Sff/7eG1jI9F//AccaibsuOxdK6OjCITs2oU19qEPTd6SgTNLTKbwaaWhEIz8119HYMRkQj9QHrRBJIl3IhjQepR+2O3ANzYgT58GNrW14f81NQjQbNgAokzPyDGXLDsc0hhPtmQ5HonMLOTvFAphDXZ2wkDmwWEcVy0uhn6orYUeXLBgPLaEQuGlP0Sy9GeyDIuzSVIVi4jg+I6MIIChB3nV0QECcelSSSD29sK2qK4On2S5dy8IxLVrYxOIXIKqJQ1dLvyPM/2rqyVpmJcXfd92daH/ocmEwUdTsW3cbtnHlQc5JYsNViuw3eEAtrW04PzPZsnJkWXkHOwsLwcOWa24h3xtOTs7MvjFVRh2e/jQOo9HTlTmwEdWFtZVPP0tOWspFJJlzqkoqYxFWrFYsD+459yf/gTdffPNsBUsFmBNURH8OKMRgcEHHgDeXHUV9LjPJyt2MjIm3rehEPyV06exPtetk/YIkSSatX0MfT5gWF8fjl1ZiUBobm5iw8EGB+HDnT4NvKmpQRVEdfXMZLL5/cBqiwXXU1XlhOCKisnbMmkzBCciCiOFyb9ovQcjh5PoIZxtyIMo2U8WAutNO/ikv1+Wn5vNwKzmZlyT2lroEm4hNTQk21fNS7icLdiklflMxDhFUZQVRLSG0GRzORE1CCH6pniYWbvYg4MwUiorsckHBxERj8zQCwZhaLrdcOq5N8+bbxL97/+inO3DHwa4vPoqHPD16/EaLpGprJTv8/uJnnsOoHLppeEDViaS0VEQBbt3Q1Ft3Ij+HxM1G3c60bfL6YTz19AA4HW7oZA6O3E+eXlQcjU10lByueRY+cJCKItkyCguBdi/H9c9Px9OBRvuZrPMljubSS9tD0O3G2uECNeYSxq0j2hKUgi8n41kLpfgjKmWFmShzXD2wqxFuVIdi/bsQWDg4ouxJ/r6YIxkZaHXTzCIDMTCQtznsTHZdJkI/Q//679A+n3/+9IpD4VQuvzYY9jbX/kK8EnbNkFVQWD+/OdYO7feCpJRUWSz6UgC0eVCb6HXXsP/zzkHBOJEfcu05cvZ2dj3igJsevNNEJE2G45x/vnAUP5M7aRnHg6UrEPncgHz29oQ2T9xAn8TASvXr8dDO81aL/H74ezyYCgt4T9TIgQcAi5H7u7GTw5gZGfDGOZHTQ2e435D2uh+KCQzDpk45N6G3Lh8hmUeixKUkRGQ6JWVcIqSFZuN6IknYGNcey3Ww8gIMmpKSkBUsrz9NvCttVU+z1NytRmGXIJKBIzQZhjGM/yCqyAGBmDvce/leCQYhC3m8YBUSHJKLxGB/GJcN5tB0JeVzdlSN91FCNhBwaAcJNDeDnxcskQGpXgyPU94jhRtNjlXYxCBqOSMw0QwlvvIchnkFGUei3SQvj7YCAYD9sfLL0Nf/83fYP92dWEPlZTAhgoEoNMffhjr4dZbgSs80DCevTU0hCxltxs6sLkZn6+dlhwIhE9LtlplP2AeQJlIK5JQCN/nyBGcR3o6bLfm5pmx67nNF5P5QmDvVFZij7L9Fg85OFF58URZhLOFf9xP024HWchVeESyLJknJVdXyyBHKIS1wYNY0tJw37xeYFNBQexe4jMkc0aj6IRNc1LOYvpCd7mWiO4mog4iuimVFoDDAWAsKMDvg4OyFEsrgQAIRC51KSgAOJw6BUKvrg7Os8MBBVdSgkgVkYySs3FLBGX24oswFi++OD4C0enEe3btAjg1N4MgamiY2Hjt6YECMhrhDJeUQBm1tUniiiNknFlJJLNReHKeHn14urtx/gMDUKitrXJSWnY2lFOqRnknklBIRrX4wU62wYDvX1OD57ncKpZwyaPbjZ9cipGXJw2rSy9Fuer+/Vijra0gm/XqpzaHJWWxqLsbGc1NTWgz0N4us2d+8QuQbjffjPVx6hQMG56Uqqrof/jLX+K9Dzwg9+rgICYyHzyIdgl/+7fjiar2drznxAng1uc+B0wSQhKIeXnyPT4fegO+/DLW4LJlyIJeuHBiLNKWL3MpYXc3sg4PHMA+aWxEr7Rly6SjJoQcLmIwAH+TJdpcLpC2e/ci89HlAs41NwPL16+fnoixENi7TufMlCxHiscjh55wpiH3GjOZgEPnnitJw6Ki6EY8P8eTI7V9w7g/UCLZFmeRpCwWeTzYEzk54eReohIMol8zETAoLQ3r/733sO4bGuRrDx2C08z2xqFDkjTUTkgtKgqfmDzV8mGbDcf2eoE5U8kudjiAi0TA42QdQg7m8qCFhgbYZO+XbF0WRYHtrR20UlOD6p+uLtjeiiJLjp1O2QpDSzKPjOD9fj/ez0H/ZPtns22qbcmQIpKyWEQk7ZCeHgQ20tMRsHz9dTx/7bUgEDs64NcUFGDtDA/j9U88gfv/uc9hn8crfj8C8p2dWDutrVh7nHlMhHVoMsk2S6OjWKter+xfmsg0eZcLQc1jx3CsggIEVSfz9/QQlwt7cGBAThfmYGJZmextrKrYcxOVFxsM4b0HI8nCuSLBIPxd7bRkTsxQFBCFS5dKwrCsLPb5G42wT81mrCGfDxjP3310FGslVfv56ywpjU0TyXwm4szKjF9s7pmRkQEFcPIkSLTIAQOBAKLlPO04Px9GRF8fJqQGg0S33w7Q+Mtf8Lpt23BMhwMKJTcXxgxHKf76V7x/y5bok5O1onXY/X4A2apViALxdMtowmWBvb1QZi0tUAqnT4OASE/HZ9fXjyeX7Ha8LxjENSkvTxzwhYAS3r0bwJyVhXNZulQSh3NJmSQrqop7xmShxxM+bS0jQ2YXck8UFo6wFxaGKxhVDScOuY8ZZ2zyVGruochOPGd3HT6M59asAUk0zcor1WmDGccip5Pohz/EPfrEJ4AZOTkw2n71KxitN96I/X7yJF5fVyeHffzbvwFTtmwh+vrXpVO7ezf+5/MRfelLIId4mrmiyBLpJ5/Ec5/+NHqVcWSZs/7y87HGgkGspx07cA6NjTBsKyuxZmPtYyFkiZnJBIP4yBE4AZ2d0ik4//zxARWebhcMYr8wWZ6IBIMgK3fuxGRql0tOVN64ET8TMfjjEZ4s73TKkmUeDjVd+BcMQs9oScPhYfxPUeQgHs4wLC+fPGNQiOgZh9q+TnOIOJw7Z5KYzDgWhUKyx+769fpkxe7YARvqiiuAWz4fCDxFQdCDM3feegv7MydHZv4qCvCCycLi4uSHJ3V2gsA0m0EMxJvREwjAsfT78d6iouSqJXw+2KAWC/ZcbS0c1LMxkDoV8Xphq2ZnQ8c5HLhOJSXhJaROp8wO8vtl38KCAtwbbtNQUqJvVQsHTKaIdfNYNEXhwTdWK+yc06ex9zdvRkLF/v0YorJ+Pci206dx35cswb159VUELxo
biT7zmfjIfi5L7uiQQYaaGiRZcDlt5LRkItmCwOXCuq2vT2ySvMUC26i9HeeycCFKsquqpn6syUSbKWizwVbo75dBxbw82AhlZeEl/9oMwYn6D85V4QnSWsJwYEAGB/LyJFlYWSl78SdjG3J5u9+PwD4HbGepv/8cvjtnj8yTiDMrM3qxVRVKIhSCgXHiBICCp3yx+Hz4XyAARZSbCxDo70cGy+nT6K/R2AiF1d2Nhr3l5VA+7e0yi4/7T73yCoxYjirFkmAQJMCLL8JYamkBCWQ2w+idqMzF4YCCdbthjCsKnMhAAAbW4sUAyUhjNRCAIrHb8Tnc/DeR6+t241hvvQWgzsyEk97aGn3qVaqKljD0euUgGKLoZckTfW8hYIyEQrhPPh+MJ48H/zcaw4nDaMIlFpwJRITI165dWMtmM/rB8LS6aZBUv7MzjkWPPgrsuPVWEHdpadijTzyBPfTxj8t+rW63NFCHh4n++Z9hQH/0o8hULCsDdjzyCAjCJUuQicjtAnJzgUWHDxM9+CAMqK1biT71KelM86CAUEgOLDlwAO0XrFYcc8sWOPRcVhxrXYdC0vEOhXCub74JjCopQa/Dc86JPsCKS32NRnxGIuQ3T33ftQt4ygGUc84BYbp27fSWD/t8wG8uWc7MhCOk92dqy5L50dsrDePcXDgkWtIw2jWPNmiFHSvu+UQkhwzwROXI0uY5InPrbKYuM26EHj0K+2bVKn0ycQ8fRgB0wwY4+8Eg9iJPk3e7ZblYe7vMhOWyU+4ppYcEAgis8mcvXx5fVg8PXOKpnYWFyfVhDQTk/iSCjTZZFvf7TWw26MLCQjjvfX3QIzU1UqdwD0yvFxmMrBe1GYPDw3LAgZ7BmgSIxHksilNcLtxfu11O4+7vh35qaYH98NprwIn16xEQ4L6qzc24J48/DpzZsAEtYGLtrcg+hnY7bJThYay9lSuBQ0wcRt5rux24ZbNBn9bVyTYy8UowiAzKI0fwvdPTkWTR3Jx4lnO0kmLt36EQ9tfAAEgtjwffrbhYDkdhPy2SKJxjOn5S4WADk4a9vcAMIlxrJgyrqvCIJAv9fhyDW20lI9z/uqcH66m8XPa4n0FJsTuYmjJPIs6szOjF7u2Fg1pWBvA2GqGctIrG6wXpEgqhpCc7GwbvwAAU2q5dUDCXXALDe/9+kHzNzVAKp0/jODw9TAi85+RJOLDLl0c/N1XFsZ57Do78kiWItqWlSWMp1uQ5IhinR45AEefkSOVQVQViIlZ0zGrF9xICwFZSMjVlEQrJ/jMjIzDWu7oAuuvWwSlJ9anKwWB4hqHPN74smTMMzeapOz/BoMwCVVU4UGlpkjiMl0ThKc6RWZ4DAzL7KzcXRhj3d9FRUl1BzSgWPfccDOKrrpLZfg0N6E/Y3o7S3iVLZDuFxYuxLk6eJPra12CQ3HQT7uXixYhk/8u/AAM++lGiz35WTiDMzcUxfvpTROgrKlDis3atPJ9IArGtDa/t7weGbNsGfPD7sSYnMnI9HuBKdzcw8vhxHHfpUmQTNDVFxxjuHaOqstR3KljkcgFD9+wB+el0Yh+1tCB4s3lz8tNTJxIuWXY4cJ0MBvk99CLunc5wwrCrSwYb0tPhbGt7GcY78EFLJHK2IWdSc+kW9ziM/M5zkEicO2eSmMwoFvX0wOZZtAh2S7JiscCZz8+HbWS1IrvH4YDTxq07fD5gXWMjghPTsX7GxvDZPh9whyf+TiY+H+yZYBB7eKKM68mEB9jxlNbycpAO04lFqSxDQ8DR4mL83L8fuolb4HBmakaGnKZaWBi+fjh7NC0Nr9VrbSUwsXkeiyYQzoYbHob+NxpxL4uL4UsNDsK+OXSI6KWX4E9cdBH03tgY/tfQAL348MOwl668En3q+Z6rangfQ+5Zx9Lfj89KS4N/xuXz0cTlkoOn0tOBJ5WVU1tfTqe0i3w+OQiGMymjSTLDSXgwCPf+DwZxnUtLce6Vlanvo3EijJY05NYTBgMwV0saxhtccLuxLnNy9LlGXDKekSHtW+7TOQOVeamORSkh8yTizMqMXWyrFQBaVASACQTgXGozQzweGNNCwODMypLTO1UVpYNmM0oMrVZkC/KkLM5y9PlgiLOBuHcvFMaqVXhEihD4//btML6rq9Gbq7YWoKiqcPpj9SUMBtH/4+BBAB5Hkurrw88jUnw+XAcmHauq4gdJblztcuE4Ph+Ud1eX7CGycmVq9n5Q1fAMQ6833JnWliUnE0kKBCT56vfjOe4xVlaW2BRs7ptIhHUdadh0dYFMtFiwDzZtguGik6S6gpoxLHr3XTjZ69eDWOPBR9ryv2XLsKe4nUJeHoIR994L5/z227HXm5oQof/2t7F+vvpVGNlOJ9ZYTg4w6AffWvGSAAAgAElEQVQ/gCF59dUondbiAk87ZSJxxw449yUl6GW2YoUsLdb2SIwULhd5803p+GVlIXiyaVN471WthELAWa8XZBVnQcYjY2PIeuYeh8EgPrOxUWZxl5dPb7ZPKARn1unENTSZ5DCaZAxDvx8YrSUMrVb8T1Fw/7VZhom2n1BVGSjhUkAu4eKMw4lkDhKJc+MsEpcZwyKbDQNNiopkifFURVt+2NuLIVGhENF550FfcmsPbmeSk4PX7d4NR3bTJv1LebmdysmTUytf5nYOHIBIpO+i9lj9/XKAXUkJbDI9psqfreL3g1A6cUIO2jIaga+VlbiP2jXq9eJ+mUzjiV6vFzooMzMxeyqWTJFInMeiKMIkL1fgmM0yA1lRsG/b2qDT+vvhe61aBfulvR1rpK4Oj8FBoocewrFuuQU6X5tlyFn5ROHlyF4vfK/RUdjcra2xbRuvF/t4YADrsaYmelXXRNLXhyBvZyf+rqsDech9qCfKIpzqcBJVxfUdGMCDicOyMtgNZWWpO8BSVXH/tWXJg4MysaOgIHzwSWVlcvaf3Y41lJenj54aGwOe8YBArvIwmcIryaZBUh2LUkLmScSZlRm52G637DfG5SmRU67cbhguigLHPDMTIDw2Bmdw716A/8c/juNs3w4CiRuGc6P62lqZpfPOO3g0N8uJzVo5fRpDFbjvy7ZtUGRuNxSnwQAgjEXG9faiH2NPDxTR0qUgHKqqJu5TxhEpg0H2NZtMAgGQhlrSS1FwTdra8HdDA9Hq1aljJAshpyXzABT+bkS4v1rCMCMjOSfZ75fEISsO7s3JU6nHxmRz5kQUH/dRNBpjOz8nTxK98YacTr5pE4yiJCXVFdSMYNHQEIahlJcjm3l0FMbkrl2Itl92GTDgvfewRhoagDdPPYVIe2MjsgyDQdyzX/8avQ0bG0EwVldjfXFT55/8BMdevJjoH/5hfO9XJhD7++HUnzgBY+nSS4FZ7FRzI/tYpPnICIZNvfUWzrumButqsl6cXNZIhO8ZzxCnoSHg8Z49MuhTXg6Sor4e1yAvDwTFdEbYfT5Zek0EnZGbmxjpoKrAZC1hyKVcRMDomhpJGlZXJ/fdVFVmHGoHBhgMOO5UDdk5RiTO/hkkJzOCRX4/9pHBAKI/Hn3DeMCk4cgI9gD/79
13cdxrrgF2ud2wEaqqZB9ozowvLEQAVm+nKRBAOfXwMHBh+fL4PsPtltnYeXnxZ/FGihDYy+3tcjjCokXT13c11YUHb3EpKxHuVzAoJ9yOjsLerawcH4yaiEh0OrE+c3L0nYzKpaE8QGICmccijTid8j7zYMDiYti/nJDQ3Q0btawM1/j55xHIvO46lByPjGBN1NSABPzhD7HnPvnJcDvWaBzfx5D7Pr/3HgbVmUw4dqzeg9yCoK8P712wAJ8RbzsEnhL97rtYwxkZwMXGRnznyYaTTEQURjvXwUEkCQwN4dpx6WxFBfZNKvZd5Sqtnh48+vqkn2Y2S7KQiUO9/U9VDV+vydo37IP7/bg3RqPsmygE/s7IiF5Gn6SkOhalhMyTiDMr036xg0EYc0YjQHVoCESbtn8FTww0GkHEmUwAY5cLhmRvL5ryf+ADiNbv2AGjZds2gIrFAsVWUSH7CR05Aoe6oQEReS0Y9PeDPDx2DO+/7DIY8UYjjjs4CBCpqopePjYwABKIJ5adfz7Iu8nIQLdb9oXIz4dCnMi4ZiIikvTKyACYv/sujlVXh+ifntHe6ZBAYHyWIW93Jt20Dz0UrtcrB6NwRiP32MjKGv8ZHEEkmnppOUswiHtnMsUmGlQVBtju3TCy6+pA+ky1r4tGUl1BTTsW+XwgAt1uTBa027EHDx8GVmzZAhx47z05sdhsJvre95Dds3kz0V13yZ53//M/wJBrryW6807cb58PuPXyy0SPPYa1cPPNiOBH7nVuv/DXv+Izs7KQxbhpE9aNz4egC09GjoZF7e0oM9q/H39zuVFDw8RrNxjEsf1+fFZ+/sSlPL29cqpyezueX7gQvY+WLQMmcRZiaen09TsUAtfX6ZQly0x+ToUMsdtBrjBh2N0t+6qazbJ/4cKF+KkHAcGZzoGAdF6MRlmqbDDILIhEmqTPISJxHosm+wCBkn+bDUR/LILF6ZQEj9UqM5aJsOe4h2FREbKoDx1CcGTpUrz+2DH8n1sYjIygP3RODnqT6k3yj47iHAIBfGY8wTEOpLjdOJ+iosTPa2QE+OR04jvyhOB5CRdufzMyAjwlgvPP6ykrC88PD2NtFhUBLx0OEEiR+O7zwXY2GvFaLclis+HzCgr01QuhEPYCT6GNIe97LOLAw8iILFkuKoK/wgMJfT5gktMJArG0FHrwqaewh66/Hj7V8DB0YkkJbNcnnsB6uf12mXHGxGG0ezIygqotlwuk0/Ll0fc6tyDo6cHvnPXPAdHJMgfHxoCHJ08Ci4qLgYmLFklyKBY5OBXd6fPBH7RYcG2EwHUrL8f1KCqadV08JfH5QBLytecWZERYNxUV4VmGM/X9uOUUV5kkK6EQ7puiyAoSJp19PvxfUeQ0Z51KnVNoJaSuzJOIMyvTerGFgIPGhEp/P4BH2xfH4YADbTIB5A0GALLPB0UVDKL0sKoKvcbeeguv/8AHcJzRUYBeUZGcIMeZXgsXghhgABgZQS+0AwcA9BdfDAKQldjgIJRPTg7AUgscgYCcLnj0KIyiZcuIPvjByUFNVSXRaTLhu0R7jxDhxCGTXpmZMOoyM5E1uX8/lH1lJRyQ0tKEbs+0Ck8s1j4440ZRxhOGepU7CiGJQ7cb115Rwq/hZArB74cDlpkZ/wTJSPH5cP8mI0ODQRhVe/bgvJuasCYTIIRTXUFNOxY9/jic6muuwXUvKgJ59NprIMM2bcL+FgIEohBE99yDqak33IABLCdOYP/9+tdYV1/7GjCGSE6Uf+QRHGfVKqLPf15OPNWK1Qpict8+rLMLLgD5x04WE2U8VVm7Zv1+YNhrrwEPOJPpsssmJ6G1JJyiyLLfaK87fRrrcs8eYDcR1uf69XhkZyMo5PNhnZeWTl8WdDAopyxrS5bjGRbl88Eg1pKGNhv+ZzDIAQtMGpaW6mcYc29D7URl7m0YK9Ida9BKPMKfMctE4jwWTSKnTmE9NjdLu8XnC88wtFplxgc7/vxgokd7vL/8BdnAF1yAPX74MPBkxQoZIH35ZThFF16ob09ADmi0teEzV66Mz9lzOmWmdX7+1PuwsthswCubDZ9fX6/vPj4bhLPQRkZk25XcXEkcRlsPo6Nw3ktK8P+TJ4FLS5aMt6P8frzeYAgftEIkh3zxsAy9hInECQatpPoKSBiL/H5cd87uzcwEcWg2438cNOO+4tzaKS0NPtDjj8OeuOoq2DU2GwjksjIkcrz0Enygz3xm8iBbIADbq7MTuNXaGt1vUVUQVx0deE9REfRyZmY4URhNFAV2yokT0PdGI3BgxQrZN1EPPHC74c9ZLLi2RLBDKirwSDSDeqYlFILPqy1LZiKUCHtVSxiWl89uCTb7dZmZ+gQjfD58/8zM8dnVnAiic6lzCqyK1Jd5EnFmZVovtsUCA9FsBtHHEXEWmw3Gb0YGnmeyTVUBWOnpRL/9LYDjppvwv127YHivWSOb3Gdng1BUFCifV16BY3jRRbKfy44d6BVmMCCjSOuwc+8clwtKVqvc7HYYp11d+H1kBIqNBypMpiwcDtkDsrh4PDnJpJfLBcOOIyBMenFfr64u9E4aHcVx1q2LXQIw08Lkp5YwjFaWzINP0tP1VbLci5AzDjmTR0scTvXzmLDIz09cYXk8MjI5GXHp84FQevttnP+KFUQbN8ZXXnpGUl1BTSsWvfYaAggXXCAdcKtV9vq56CIQf9xOYWwM/Q17eoi++EVkPR87huEou3bh/nzzm+EEwC9+QfT73+Oe3XEHMoIi153XC+P7+edxnzdvRiBCS1bb7Vg7ZnN4+YbVis/eswdrMy8Pw1nOPTe+rNlAAN8rGMSazssLX5ehEJyFvXvxGBnB/5cvB8l6zjnAR49HThZMT8dnT1epoNcLDGWnl4fKxCrRZizv7pak4cBAuGFcWytJw6oq/fs1crYhTxIlkqRhvFNFOeCSSDb2HCAS57FoAhkcRBVDXh5sCSYNOSOMCHjAZGFR0cSOqdWKlgrFxch4DoUQmCICmZeejj20cyfW04UXJj/tUit+PwhLrgbhSa0TCfdx5ABEUVFiDprLJXu0pacjoz/Sxnq/ihDSZmUST1GwlnhdTUbocWm4z4frGgjAHi4sBLEQKXxfI4lErvBQVXy2nmTEJBOb33dY5HDINgeKAnskOxv3QhuU0AbwPR7Yn14v9Osf/4h7fM01cngY9w/85S9hp27aBL9sMh3V3x9eNdXYKMuatVOLBwags30+rNGFC2VQIVbmoKKET1nmIMKyZXjohXMOhyQOueQ/L08Sh3qW6k+HcHamdvBJX59MVMnKCi9JrqqavmqSZIQrUHJz9bHbnE741Pn50W1YHgrEmbpJljqnOhalhMyTiDMr03axbTY5BWlgAEps+XJp3I2Oyqh1U5NMCzca5bSqnTthDH/0o9jgzz8Ph3XrVmzs06fxuvp6HLe3Fw56SQl6igWDiLy/8gp+37gRjr0WLIJBvM/ng4LMzwdY8MSwoSGpsLxeHHvVqslLl4NBHGNsDNeguloqNO6bNxHpxddpYADZl4ODkjSoq5vdSBf3MeQHA
ywRDLnILMPpMOhVVWYbclm00Sivodmc/DWyWmEUJ9rLhMlNzryM53xcLpBEhw7huq1ZA/ImjiE5qa6gpg2LTp8mevRROeyDJ64//zwc3ksuQTDDaAQWtbURff3r2MP33otWBQcPYvpyfz/Kk++4QxoxR44Qffe7IK4uugiR+Uh8CAZBAD7/PNZVSwsc/ooK+RqelOj3Ay9zcrCGTp1CD7MjR/C6xkZgaV1d7AwSrQgBI9jlwnfMz5frifuX7d0LnHE48L1WrUK24dq1ksj2+YCHPPSgpGR6ou6cLelw4Py4ZDk3N3wfsmHc2SlJw54eGT3OypJDT5g0nI5MSW70zw8uKeaMw3iJw8hjchZ1osNaiGaNTJnHIu3Bzuw/qxXr8803sU7q6qTeZ1KHH/GSLH4/CESvF/2iMzOxn71eYEx2NnTkzp1w1C+8UF+H12rF5wUCqCSJRixphYkt7nFVWJjYnvR6ETC2WHCtamrw2anYc0xPYR3CmayMnwUFco1NlcALhaD3FAW2Off05unzkRIIyAwt7eeFQiB7DQaci17YJER40CUCa98XWMRD2UZGZIZhdnY4ica2uckke/LyAJQjR4BRZWXIaM7Ph53T1YW91twMX+sHPwBpf+218LG0E5gjS4zdbmCDxQL9vWLF+PWiKDjv7m7Yyrm5sgXBZOXFNhvOm/tXl5XBLqqv128ABxOHHOApLJTEoZ6BGL3F44FfqyUN+TukpWEfa7MMeaDOXBfWH6qKtaQHhoyMYK1yCX+sz9Wh1DkFrnDqy5wlERVFWUNEVxLRz4UQHbN8OnrJtFxsnqRlMEBBGI1IX2enm/vWZGfDIebIWUYGwM1ohOP/9NMgUTZuxCAVVUVGUFoa/i+E7HExMED0wgsAlq1b4RS/9BLAYdUqDGCJ1hCaJzBzf8KODpwbp03X1EgHoLw8/HvEEh6UoKpQbKWlUqkyccikF2cbRpJMVisigz09+P/q1ehzNtNOobYsmYefaB1UnpbMWYbTme4eCoUTh0T4PC1xqPfnjYzgPnGvzUSO4fXiPKcyLdtmA+l07Bi+1znnYA2kpRHt37+fnn76afrkJz9JdXV1/JZUV1DTgkU2Gxp/m81woFmefRaZxJdfDiwxmUAgvvEG0b//O7DiO9+Bo/Tss0T/9m+4f9/6FsrNibAfHn1URu2/8AX0X9WKqiJq//zzcKCqq2F8r1gxPguQswQ5Q/Dtt0EechBmwwYY89x3qLh4cmOZ+yqGQjhGbi6eO3AAGLl/P75HZiYIww0bgJfatRoI4Ny5P2Nx8fgG+npIMCjJTlXFd+Rya0XBeXZ3hw8/4X49aWkwiLWk4XT262HikDMOiSRxyBmHenxGKhCJUfDofY1FXq8kcfjBfTA7OrCezztP9sxKJuNj+3bg11VXwYY5fhyf19yMPer1IpDq9SILW6++ydzuoK0NuNLaOjk5yeXagQCuQWHh1J19v18OWiCSe346p7/PdeGektrSVS5/54m7yZIqPh/IlMxM2LOnT2NNNTREz2YMBuUk+8JCeX+4VYzJpC8+Mx4fOLCftm9//2CRzyfvu8+H+5yTI3Wm0SgfoVB4P15FwX3o6JBD/rZvh+666SY8zxmIbjfR978PAueWW5DhrCUOw05YyOEs3Bpm8WIZTGNy0G7HZ9jtWFd1dZO3ZuJjHzkC38hgwLGXL0++rZMQuA5MHHq9OF+uICsv19/H0EOCQdiI2rJk7uuuKLBltYRhWVlqB1tCIawZozHx9hda4TkHoVB8JdtJlDonjUVnKQ+lq8xlEvHviOjHRHShEOLlWT4dvUT3ix0KQTFwD6tgEBFxjtoMDeH/ubnoq8I9V7KzsYEVBU7kY4/hNddfT/TqqwD1Sy+F4dHRgU1cVyenOD/3HAC+rAxRd5sNpMC2bdGj404niD7u/9HdLZv4lpRAMZnNyEDyepEaL/ma6OL3A8SdTtkjIxTC99GSXlriMFIcDjj2bW0wzlpb4ysP0kO4tFr7YKAkkoShdlrydEswKIlDjrCaTPIaTuf0VyI5eZAJmESEo73ccHoqMjQEIqm9Het040aiN9/8Cd1xx+20c+dOuuCCC/ilZ7WxnIgEg5iObLGgZNhkguGxfTswYds2OKQ8se/JJ1GuvGIFSEOzmejBB/H8kiVE998vWwjs2QOjemAAuHTHHeEZzkLA0N2+Ha+prAT52Ng4noDjMmM+Zy4n9npxnps2AUNtNrw2N3fyKXU80c7jkUM7Dh+WWa6BAI5xzjnIOFyxYvzaDAaBrXxunCWltwEarWQ5KwufrSUMBwfle8rKJGFYWwsCZboNY45Gc8YhEa4rE4fT8fnJ9Efk9xNNL5H4k5/8hG6/PQyP3jdYFAzChtH2MeRp4YoCAof3jcWC/61ZM3klQzyyfz+CHuefj2O2t4NcW7QIeOP3owrD4UAP6cggaqLi8wFLrFbsu2XLJl77nB3ncEhya6rEaTAI+6y7G8erqIA9NhM2yFwUJukYn7lPLBOHemXpaIWD6Twg5eRJ6MhFi6JjE+8NIcKJRI8H55yZqe8wQCGIHnnkJ/T3f3/2Y5HDgcDe6Cj2Obc+4WvMxKGW4EtLC5+WrE2ayM9Hy6dgED4XDxprbgam/PzneM+nPoUAXazJxW437IuxMeBNa+v4TGNuQWC14nwWLsR+nki/+f3odXj0KOyarCyc29KlyQVhVBXX0WKBncbD2kpLJXE4lwIUTHRqCUOLRWbi5uaGT0tesODsxMhAAHsgI0Of6hImYtPSYFvGY2tFljpzUs0E7br0IBHPRh5KV5nFtp36iqIo2UII1+SvPLukrw8b3O3GBtP2pRgYgDOYnw/DY2hITm3jTC8hQAgGg8gSOnoUx1y/HkqJ095ra6E8bDZkILKBvmsX/nfjjXD8o4nVCofUbodC46lyNTUgD3NzoeQOHsRnnHfexAM2hIAiGhjAeRcUAEzY6TWZ8P6srNiA7vGgT9KJEwCg1lYQB9OpALgsmTMMmaQjkhl+BQWSMJypLEi/f/xU6vR0nEtW1swqdc6ydLmkgpiqmExQ8mygTIVsKC1FlklPD8jEF1+EA0ckS8gnk/crFm3fDry44AKsZ6MR2FJeDuKvs1M24b//fmQLbt1K9E//hADD5z4H/LnsMjxXUACj/eGH4ZxXVRH9678iQ1S7T9vakL3Y2QmD5OMfB7akp8OZ0hoYXq8cCnD4sOzLuHIlyMOFC7E/R0ZkVHmyaLjbLR2+EyeAY8eOwegpKUH59oYNCLJE29OqKjOoVBXfu6RE30CGqoaXLNvt0ilmA5mJupwcXIe1a+XU5Jnq16OqMuOQDXU2FnlNTacYDLJELBEiMfL9c0FSFY+EwF7VZhjabBKHebptY6Ocfsrro6sL+7KhQR8CsacHts6SJSAQLRbYSZWVeASD0Bd2O3BELwJxZAQ4FQoh8BBtaJRWPB5cp1AIdtVUyS1Vxffq7MQeLC0FXs/lUsLpEh6UMTKC+yoEcKiiAutOj6yciYSz2MfGoMuqqqBfBwehUyMlLU32/LRase7T04HdoRCwn4P4eoi2
Byxj9eTvSR0sCoVkphzrzexs6OfILD8i/ExPD9dTWn02NAT7IDsbPpTHQ3TdddhrQgBXDh5EEHXBAqK77orde1lV0Xbl5El81qpV4yezcwuCwUGcU3091tBEeDA6imDsyZM474oKBD65FUQiEgziu1ssOJdgUJJHFRXAmNkcIKIVl0uShT09wEIOtqan476ce64kDqcbA+aKmEzAEQ6SJ+snM1YxMV9UNPl7eCCRdlAR+9I6T3VOWFIJ3/SSWctEVBQlm4juJqJriaiKiNxEdJKI7iOiZiL6lyhvu1UI8XNFUX5ORLcQUS0RfZeILiOiMSFE/ZljVxHRt4jociIqIKI2Apv8oNB8YUVRXiaiJUS0iYi+T0QXEFGAiJ4kon8QQngjzvkTRPTPRFRHRB1E9J0zv/+LECIeKNH1Yg8P48FTlBYvlsZFfz9AsLAQDqHFIo1CbYbXvn0wfrduxWt37gTheN55IOmGh3HMkhJk/D36KMr+8vJkdtGKFdGBlFPhjx6FMjaboUAXLYJSMpkABgcPQslUVIDIm4i08nhAOI6O4nUccU1Plz1JJnq/34+mw+++CyOhqQkKWG8jORgcn2WozVKJ7GM400o02lRqJvCysmZXqQshm4KXlCSmGDjLU4jJB724XC6699576amnnqLe3l7KysqihoYG+tKXvkRvvHGUvve9b0Z72zwWaWT/fgw5aWmBo60oaG+Ql0f04Q8DS7KysMfvuQfR81tvRR+gF15Aj0ODAWU9l1yC1+3YQfSjH+E+Xncd0Yc+FN53qLcXxOXx43CWL7sMWOR0ShJce99HRoB1+/fDKcjNBc6dey7Ok3v+MYE9WRZgKAQSkrMNu7vxeVVVCMJs2ADDPdbaE0KWxbHTX1qqb7ZvIIBr/957uF4DA8DaQED2mamuDs8ynOl+PaGQdLQYI41GmXE4G4ZhMoNWiJLLSJwIj44ePUrf/OZZh0eCCLpIm2E4Oip1U3p6+KTkoqLYjszoKNoHlJYCj5IVpxMD5zIzgUMuF4IEBQUI2goBXBkclA5msiIEgiOnT4P0aW2dmPzhElu3W2bITcXR4xKzjg7gbWEh7LS5PsBAb+HSeB6UQSR7aBYX60fAxSvcLzwUAlnNE2oXLYqdEcRrIRSSAXYi6DaPR2Y2xiPvRyzyeCSB5HTKHsF5eTKwzb13WU9F0xNMMBoMWEtHjuD9b7+NYMjHPoZ1JgRw6rnnYPO0tCADMVbwcnQUPpPDAVJrxYrwvc4tCLivZlUVCMZYNr0QIDKPHMF3NhrhS65YkXhbIb8feGixwN7gdinl5VjHevboTFQCAeknM3HIVSCKgnPVZhkm6oucTcJkel6ePj6izYYATVFRYhmOoRB8WR5gxFzAGQ5gUiv2fcpD6Sqzyf//NxHdQEQPE9FhIsojopVEdC4R/YyIqonoNiL6NhEdO/OeXRHH+AsRHSdcUDMRkaIoxWdeV0FEPySi00T0YSK6n4gWE9FnI46RSUQvEtHLRPRlItpIRHcQ0RARfYNfdObG/YKI3iGirxFRLhH9BxH1xvuFu7rifeXk4nIBAJmNLy/H711dMlW8oADgvXcvFEVZGRQQN2EeHCT63/+V6fJ//jOMi8pKRL8HBuCcu93IBHrkESiZZcvg6Le24n3d3ePPz2pFph/3GFy4EA5qaSkAmoegHDkCUFqyBCDd3x/9+3q9+ByLRWYIFRUBPIxGOBs2Gx7RJBRC5O7oUbynpgbKOjcXRGkyoqq49tqHtm8XR0n4kZ4ePqhkJkQ70Vk7lTojQ/ZX5HNnRTqbwiWnvAYTEVXF92XSNpZ88YufoaeffpxuuunT1NTUQk6nnY4dO0gvvLCbrrnmVurr66Gnnvopbd78z5STs4zy84l++9ubX404TEph0cmT8b5ychkYgJNdWAjDorMTQ0PS0xHF3rsXBkJGBtFXvgLD+VOfQuT9H/8RPcQaG0E2lpbCeb77bjjqjY0YnJKfD0MvOxvY8tpr2MtmM4jANWuw5w4dggGRlyf39fAwnPx338WaqK3FlOalS3G+3N/GZsMxcnLwiLYPhAAGvvUWyMiBAXzewoXIwGxtlcNbGHOiicMBHOZ+ZUVFwILOzuTuBZOGnZ0gBHp7ZT+b9HTogKoqOB+VlbjeWsOYA1PTLUwcaicqc6kyl4PPpiTbH5EocSLxn/7pM/Tss4/TDTd8mhobgUfHjx+kZ57ZTRdffCtdckkP7djxU9q6FXj0pz/dfDOlsG20datsoI7jYa3yI87+R0Qkp9LytFo9+jf19OD8amuJ/vQn4AIPz1AU7BePB5/31FPJfR4RrgMPfOKp7hN9j2Aw3ImaauWAzwd7kjOEcnKmv23JXBK+fto1yKWoGRmz38+M7Ri211wurMucnNjYIgS+kxCy9QM/x4ROPN9rz57PUFfX47R48aeppaWFgkE7DQ4epG98YzfV199K9fU91N7+U2pqAha9/XZqY9GOHZL0ysmR5HF2djhBEWtycbThJF4vggFmsyw9/uhHgRuKgjLhX/4SxOBFF6G8Odp9DQYRMG1vx7HWrw/PSOUWBD09uNcVFbBLYu1lr1eWLDud+L7r1yOxIpFehF4vbI/+fthoHMDn8unIqpCZFC6j1g4+GRiQOjo/H0Th+vX4ycNG5yVccnKgmxMknQEAACAASURBVJxOfYb85ecDkzgpaKrXnEn8sTGsOyatrVaiL34xrkO873govWU2MxFHieg3Qog7Y/w/Zi26hgH+sRDiUxH/+0/CTbhWCPH7M88pRPR7IrqKiFqFEIfPPP8yEW0hoi8IIR7UHONPRHSeEKLszN8mIuomIueZ97vPPN9ERO8SUVo8DHBXlz7ZP4EAgJBJqMJCRI4UBQ7u0JAs77FasdHKysKNS78fBrGqEl15JdHu3TCEL7wQx2HyLzMTJZ0vvIDnP/YxKLpoRr2qYhOfPi2d4cZGkI7aCK4QIDtZsa5YMT7izaQXlxZyHwpuWpuTE58RxM3VjxzBteJsx3jSp6MJG2JawpANeCKZ6s0lyRkZs6c4tT0XeSo1Ty5m4nC2nfWJxO2GssrNTbyckh0EdgpYeMJgKES0bl0hbdt2I33taz/8P3KDy1CEIHrmmZ/Qf/7n7fTggzspJ+cCOn6c6LvfRZQrVbHo5El9sMjrJfrVr3CNN2zAnnznHfxvyxZpnHo8RA89hPX2hS/gfj74IPDqIx8B2ejzgUB85hncr+uuA9nnduN9QqCk8J138DnnnIPPNJslFvJwEB5E8NZbMJSJsO/PPXd8SaDbLaeXajM3WIQAhhw8iM+2WPDc4sVE69YhkzlePHE6ZV8lznZMNAuaM3b7+mBA9fbid58P6zc3F8GSujr8rKiYXcM4GJTN5lm4l5TJNPfKgphITLQ/ohDhBGksUVWQyhwEu/nmQlq9+kbauvWHZLPJwB/3Auru/gkdO3Y7rVmzk6qrL6A//1lG3FMRj7ZsIcGEDTvpiV5vzl5MZCpuNBkcxD2prMQ+5cBCQQHuKU+aLCjQJ2uPBzMJAfJwIr3HNhKv0fT0qelzvx+EVCAgB0S
cjf28ogn3TebrRySd2LlQFhcpPFyAs96cTpzvRNk70UhDXjNE8a2XP/yhkBYuvJHWrv3h/x1TO+Cjvf0n9M47t9P55++koqIL6OmnUxuLfvc7EgsWyAm6vBa05OBUsCkYhM3gdsMO6etDRQUP/6utRR/pnh60YbnooujHGRhAYofHg+oGDoASyRYEXV34vNJS2b8+moyMwB86dQr6eMECDEqprZ36une55GAUxsbsbOBlRUXiCQDJisMRnmHIdhER7EW+xzwAZaYzjFNZgkHYyyaTPjpPVbG+mfjmNSgEcI4zw7UPqxU8x/Aw7jXbWYxLJhPRM8/ElYn4vuOh9JbZzEQcI6INiqLUCiESzdH77yjPXUlEp/jGEREJIYSiKN8l3LwrCIwzi0pEP4o4xitE9BFFUXKFEA4iWktE5UT0AN+4M8c9oSjKcwSGeVKprY3nVROLqoKg474cVVVQAEYjnjca0d8rNxcbrb4eGzOScHv+eWy0666Dg2wyoRywuBiRruJiZCvt2QPltHIl0Z134niR4vXiPR0dchjBhg1w8iNBxu+HUnU4cMwVK6QyFAJKkgkBoxF/GwyI1nH/xHilo0OWDtTWwuGvrJzCxSZ8F21JMjty3Gsmsix5LkSt+Rp6PDgnJoP551xz1icSJlyKi6eWjcJkIE+Y9npxbxRFkoQ83TUvr4DefXcPORxdVFtbO65U5eBBHHfFChh5Mfr/pBQWNTTE86qJRQhE0NPScF3y8kDaFRUBS7xelF2dOkX0gx+AyPr3f0ep4be+hfX48MN4fs8eTIfv6UFG35134jg8AITbLoRCaKFwySUSC5xOGLNmM/bl3r0YgDA0hL8/+lEcM3KaIJMOmZkwKrXly6EQIvR79uA7jY5izTQ0oGfm5s3R+1LFErcb55OZCcM6sq1EPOJwAON58El3d3ibhNJSfE92Mnhw1myKdqKyEHLP8WO2z28ySXbQitcrS3P5wdM9uXcZD2og4mtUQEeP7qHa2i6qqqqlJUug77lX5htvYBDRt7+NfRZDUgaPXnllslfEJydOAD9aWhA0TVaOHUMAdc0aBB8OH8Y+bmmB03ngALBt+XLYJ8mIELC3eAhetAEJWrHbYddw4GMqTrDTiQCL1QqSpK5u8kELqS7cX5MdUL9fXjsuj59LAx2iydgYvkNxMXSixQL7f6IAFus4vx96JzNTDvAyGCYuKxWCaNGiAsrN3UPf+EYXVVTUhtk+aWlEv/41ehl/85tEF18c8zRSBouuvVa/fSAEbAiXC3u7pwdZ1x6P7Nf8wAP4+667orde8PlQQdHXB1zYtEn2eOUWBJ2deF1hIXR/NCzQJlNYLLh3jY3Arqn2jLXbJXHIJf/5+chgrKiYeULO55NlyUwc2u34n9EIO2jlSlmaXFx8dmPddEtaGnSTywV9mEgQnPuNMh5bLFifDoecgM44zcJEYW4u1iy33OAWJyUlsIHLyqZkW7/veCi9ZTZJxC8R0WNE1KEoymEieoGInhBC7JvCMdqiPFdHRDuiPH/0zM9IGmxQCOGJeO5MwS8VEZHjzDGJUCsfKToWBk4uAwPYaHY7DIKlS2EEtLeDla+ogGFotQLMo00+On4cBvLGjdKgXLkSYHv0KLJ9Tp2C4snKgnGwbdt4EtRqReZQby82eG6uTF2vqhqf9TIyAgIxEIDCrKmRDf+ZOGSHLRCAwZSdDcUY7wQnIiiUffvgtOfng+Coq5v8fVw2oh1+oi2xysiQg0/M5rljdIZCkjjkPoCcWcDDZVJVaebnY12PjUFRcIagliSM/KmdkMfCQ1a4p562l819991Ht956M23eXEctLS106aWX0vXXX0/r1q0jIpmdwSRkDDLzfYdFf/0rsGTlShCI+/Zh/V18seyrtXMnHI01a4i++lWi//kfBDDWrCH6lzPdRh58EOXJFRUoYz7/fOn8vPEGsqR9Przngx8M79PjcMhMwtdeQ9CA2xVccQWc++Li8Xs1EAAeBYM497w8vG//fpCQ+/bJ3ogrVuA4zc0wVHJz44/Ye73AIZcL64aj85PtR58PuKolDTnSbzAgGLJ8OQyo0lJ5XrNdiiiELFOOJA65rC6VsEg7KEUbIOIouXboh3ZqMP/OpYcsigK9zf39mpuxHnJz5WPz5vvoy1++mX772zpavryFLr74Utq69XrasGHd/1UJEE2aNfa+wiN2Ihcu1IdAHBoCdlVXg0B87z3cb66qePdd2EiNjckTiF6vLHOsqYk9gIkIGMXOFa+jeAOXHg8cNZ6KuXjx5IMWUllUFdeUHdJgEN+1sFASh7Md9J2KFBTI+19ejvXY3y9bhUQTRcH3HR2VGa5ZWXiOcYrL/jlLnIM+gQDR3XffR3feeTO1tNRRc3MLbd0K22jjxnX/h2VEk17HlMEiPXXTyZO4vm1tCPht2hTeo/uhh4AlX/0qcCZSurtB+oVC8PMWL5Z7dXgYe9nths5oaoo+edvjkf6ey4XX8pC3eLOOuVc0E4fcgon1V0XFzA5eGxwMzzIcGpI6tqgIvh4ThhUVc2doy9kkGRmy739klRdXD0ZmDg4Py9/HxsYPqlQUYFlZGdqbFRbK4GlBgSQOI3uRJmlTzvNQScqsbS8hxB8URXmdwMhuJaK/JaIvKYpyjxDi3jgPE3nRE5GJ5orNKXdHm72QmQkAN5lkVJkzDh0OWc4cKTYbnP8FC0DOvfQSALehAX3NXn0Vx167FhvabkfWDROIoRDAu60NQMDGaH4+AIWzeiIdrlOn8MjKwrGNRhizkaSXySRLhPLzcW7x9ucYGYHzz73TNm0CGEUzkrU9AvmhjXrwoBYmDOcaERcMhhOHRLgXubkTT6Weq8LkQzRS0OXC+u7vjx7lZGXCJTuRSoaNCI9HlnNr7+XHPnY1XXDBJnrmmWfoxRdfpEcffZTuu+8+uueee+juu++O9yu8r7DoxAmJHbW1IO+cTmTCcUDhsceQZfShD4HQ+/znYfTddhvRLbdgr37rW9i3115L9Hd/h/urqnjfCy9gbTc3I4gRObBgbAxBiQMHcNy0NBCNa9fi89PSYHxEOjg8IZ5J5cOHkXF44AAwITsbWcvr1sEgVVUcKz8/foLO74fRxP0Iy8pi9wTico6uLkkacsk0kTSMFy7ENeA+MtoeZhP1x5pu4b3LzicRvqfJJDMOU0mCQawPLUHIUwT5ee5nqRVFwb0pKoKTv3QpyAo2fpmsNpnCszP5vdzPbuXKq+maazbRs88Cjx577FH63vfm8SiWOBxwlLmtS7Li9WJgU2YmghZdXcCo+nrc2xMn4JTX1yOAkowMDYGQFCK8n2qkcCYd40lJSfwZIH4/cKWvD+uMByil2r6MR7QZLpw9zpNAi4tlGXqqCvcNHx6GnX36NNYnDzOLJkwkjo3JKdOZmXiMjEBvZ2WNx6KsLKIbb7yaLr98HoumKj09uE9tbdh7a9fK6qXhYaI//hH6/LOfHV/y63IhqDA8jHW7cqW0e8fGkDDicOD+cGAzUoaGQEC2tWEPVFXBH6qpic+P4VYpTBz6fLIXPQ/wnG4fgz
[… remainder of the base64 "image/png" payload truncated for readability: tens of kilobytes of encoded PNG data for the attention-head heatmap figure produced by the plot_layer_attn cell below; only the tail of the encoded string, the cell's "text/plain" caption, and the cell source follow …]
kTvI6I3E9HbiWiEiNxEdJmIvkREHxdCXLb43Cdt9vcJImrpi6yhobFu0PZIQ0OjGaBtkYaGRjNA2yINDQ2nQdutCmE0syiMYRj3E9FHiGiAiEgIsbChB3QNhmG4iKiXiMaI6Hki+nMhxJ9t7FFpaGisJ7Q90tDQaAZoW6ShodEM0LZIQ0PDadB2qz5wSibiPBGRYRieJlG1mSSiMxt9EBoaGhsCbY80NDSaAdoWaWhoNAO0LdLQ0HAatN2qAc2eiThJOKGMJ0QTHLBhGB1EdLvy0gUhxLmNOh4NDY31h7ZHGhoazQBtizQ0NJoB2hZpaGg4Ddpu1QdNTSJqaGhoaGhoaGhoaGhoaGhoaGhobDxcG30AGhoaGhoaGhoaGhoaGhoaGhoaGs0NTSJqaGhoaGhoaGhoaGhoaGhoaGhoFIUmETU0NDQ0NDQ0NDQ0NDQ0NDQ0NDSKQpOIGhoaGhoaGhoaGhoaGhoaGhoaGkWhSUQNDQ0NDQ0NDQ0NDQ0NDQ0NDQ2NotAkooaGhoaGhoaGhoaGhoaGhoaGhkZR/F8WjJB9O1hR+AAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled_likunlin.ipynb b/Untitled_likunlin.ipynb new file mode 100644 index 00000000000000..6d561c5185b780 --- /dev/null +++ b/Untitled_likunlin.ipynb @@ -0,0 +1,884 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/14/2019 15:48:11 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n", + "05/14/2019 15:48:12 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "05/14/2019 15:48:12 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')#do_lower_case:在标记化时将文本转换为小写。默认= True\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[2331, 2351, 2757, 3280, 5996, 8289]" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tokens = ['death','died','dead','die','dying','dies']\n", + "tokenizer.convert_tokens_to_ids(tokens)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "BertForPreTraining:\n", + "Outputs:\n", + " if `masked_lm_labels` and `next_sentence_label` are not `None`:\n", + " Outputs the total_loss which is the sum of the masked language modeling loss and the next\n", + " sentence classification loss.\n", + " if `masked_lm_labels` or `next_sentence_label` is `None`:\n", + " Outputs a tuple comprising\n", + " - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and\n", + " - the next sentence classification logits of shape [batch_size, 2]." 
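A minimal usage sketch (not a cell from the original notebook, and assuming the `model` — a `BertForPreTraining` already put in eval mode on CPU via `args.no_cuda = True` — and the `tokenizer` loaded in the cells above): it shows how the two tensors described in the note — masked-LM logits of shape [batch_size, sequence_length, vocab_size] and next-sentence logits of shape [batch_size, 2] — are obtained when no labels are passed, and how the masked-LM logits become a vocabulary distribution for a single [MASK] position.

```python
import torch
import torch.nn.functional as F

# Hypothetical example sentence; `model` and `tokenizer` are reused from the cells above.
tokens = ["[CLS]", "he", "is", "[MASK]", ".", "[SEP]"]
input_ids = torch.tensor([tokenizer.convert_tokens_to_ids(tokens)])  # shape [1, seq_len]
input_type_ids = torch.zeros_like(input_ids)                         # single-segment input

with torch.no_grad():
    # With masked_lm_labels / next_sentence_label left as None, the forward pass returns
    # (masked-LM logits of shape [1, seq_len, vocab_size], next-sentence logits of shape [1, 2]).
    mlm_logits, nsp_logits = model(input_ids, input_type_ids)

mask_pos = tokens.index("[MASK]")
probs = F.softmax(mlm_logits[0, mask_pos], dim=-1)   # distribution over the 30522-token vocabulary
top_probs, top_ids = probs.topk(5)
print([(tokenizer.ids_to_tokens[i.item()], round(p.item(), 3))
       for i, p in zip(top_ids, top_probs)])
```

This is essentially the computation that `analyze_text` and `show_lm_probs` in the cells further down perform for every position of the input, one masked copy at a time.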
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "from_pretrained:\n", + "Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict.\n", + "Download and cache the pre-trained model file if needed." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/14/2019 15:48:15 - INFO - examples.extract_features - tokens: [CLS] i love you [SEP]\n", + "05/14/2019 15:48:15 - INFO - examples.extract_features - tokens: [CLS] hello everybody [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['[CLS]', 'i', 'love', 'you', '[SEP]']\n", + "[101, 1045, 2293, 2017, 102]\n", + "['[CLS]', 'hello', 'everybody', '[SEP]']\n", + "[101, 7592, 7955, 102]\n" + ] + } + ], + "source": [ + "import re\n", + "def convert_text_to_examples(text): #把每一行的句子变成一个实例,一个实例中包含text_a,text_b(text_b目前是没用的)\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line) #想要匹配这样的字符串'You are my sunshine. ||| I love you.'\n", + " \n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1) #匹配的第一句,比如You are my sunshine,my only sunshine.\n", + " text_b = m.group(2) #匹配的第二句,比如I love you.\n", + " \n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "#疑问,当text是一行的时候,line是一个个字母 -> text是[\"***\"]的形式\n", + "#print(convert_text_to_examples({\"I love you\",\"hello everybody\"})[0].text_a)\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " #把实例变成一个特征\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a) #tokenizer的作用是\n", + " #print(example.unique_id) #*****************************\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " input_type_ids = [] #segment embedding\n", + " if append_special_tokens: #输入参数中默认为true\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(1)\n", + " print(tokens) #*******************************\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens) #把原来句子中的词语编成在字典中的编号\n", + " input_mask = [1] * len(input_ids) \n", + " print(input_ids)#***********************************\n", + " if ex_index < 5:\n", + "# logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) 
for x in input_mask]))\n", + "# logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,\n", + " tokens=tokens,\n", + " input_ids=input_ids,#字符串中的每个单词在词典中的index序列\n", + " input_mask=input_mask, #一堆1\n", + " input_type_ids=input_type_ids)) #第0类和第一类,对text_a,text_b的区分\n", + " return features\n", + " \n", + "examples = convert_text_to_examples({\"I love you\",\"hello everybody\"})\n", + "features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + "\n", + "def copy_and_mask_feature(feature, masked_tokens=None):\n", + " import copy\n", + " tokens = feature.tokens\n", + " masked_positions = [tokens.index(t) for t in masked_tokens if t in tokens] \\\n", + " if masked_tokens is not None else range(len(tokens))\n", + " \n", + " assert len(masked_positions) > 0\n", + " masked_feature_copies = []\n", + " for masked_pos in masked_positions: #用[mask]依次掩盖每一个位置\n", + " feature_copy = copy.deepcopy(feature)\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, masked_positions\n", + "\n", + "#masked_feature_copies, masked_positions = copy_and_mask_feature(features[1])\n", + "#print(masked_feature_copies[0].input_ids) #结果[101, 1045, 2293, 103, 102]\n", + "#print(masked_positions) #结果是一个range(0,5)" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [], + "source": [ + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): #输出结果的函数,要最高概率topk个输出\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item() #这个probs是该字符串第i个位置上填上词典上各个词的概率\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret #返回的这是个啥" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 'yellow_1'\n", + " elif gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " \n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) 
+ colored.bg('black')), end='')\n", + " \n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " #print(gap)\n", + " avg_gap = 0.\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " gap = math.log(top_prob) - math.log(prob_) #计算两个词之间的差距\n", + " #print(top_prob,prob_)\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " print_token(tokens[i], suggestion, gap)\n", + " avg_gap += gap\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print(avg_gap)" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": {}, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=20):\n", + " if text[0] in analyzed_cache: #分析过的缓存\n", + " features, mlm_probs = analyzed_cache[text[0]]\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " tokens = features[0].tokens \n", + " else:\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, masked_positions = copy_and_mask_feature(features[0], masked_tokens=masked_tokens)\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) #把input_ids增加了一个维度,变成[n_features,sequence_len]\n", + " #这里的n_features实际上是句子有多少个单词位置,每个位置依次换成[mask]\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " input_ids = input_ids.to(device) #拿去GPU\n", + " input_type_ids = input_type_ids.to(device)\n", + " \n", + " time_start=time.time()\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " time_end=time.time()\n", + " print('time cost1',time_end-time_start,'s')\n", + " \n", + " mlm_probs = F.softmax(mlm_logits, dim=-1) #最后一维,也就是vocab 换算成概率和为百分之百\n", + " #print(mlm_probs.size())#这里实验的是torch.Size([5, 5, 30522])\n", + " tokens = features[0].tokens #不知道要干嘛\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size() #三个维度分别是batch_size, sequence_length, vocab_size\n", + " assert bsz == len(masked_positions)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(masked_positions), vocab_size)\n", + " for i, pos in enumerate(masked_positions):\n", + " reduced_mlm_probs[0, i] = mlm_probs[i, pos]\n", + " mlm_probs = reduced_mlm_probs #压缩一下大小,节约不必要浪费的空间(只需要第i个batch里面[mask]位置的词汇表概率即可)\n", + " tokens = [tokens[i] for i in masked_positions]\n", + " \n", + " analyzed_cache[text[0]] = (features, mlm_probs)\n", + " \n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs) #传入的probs是二维的\n", + " #print(\"************************************************************************************************************\")\n", + " #print(top_pairs) #******************************\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " return top_pairs\n" + ] + }, + { + "cell_type": "code", + "execution_count": 50, 
+ "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/21/2019 16:22:56 - INFO - examples.extract_features - tokens: [CLS] he is dies . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['[CLS]', 'he', 'is', 'dies', '.', '[SEP]']\n", + "[101, 2002, 2003, 8289, 1012, 102]\n", + "time cost1 0.0779261589050293 s\n", + " 0 | [CLS] \t 4 | . 1 | , 1 | the 1 | ) 1 | \" \n", + " 19 | he \t* 19 | he 8 | it 6 | she 3 | and 2 | the \n", + " 0 | is \t 33 | then 15 | soon 12 | eventually 7 | later 4 | also \n", + " 0 | dies \t 4 | dead 3 | alive 3 | right 2 | beautiful 2 | not \n", + " 93 | . \t* 93 | . 6 | ; 1 | ! 0 | ? 0 | | \n", + " 0 | [SEP] \t 11 | \" 5 | he 2 | . 1 | and 1 | it \n", + "\u001b[38;5;15m\u001b[48;5;0mhe \u001b[0m\u001b[38;5;214m\u001b[48;5;0mis\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/then \u001b[0m\u001b[38;5;214m\u001b[48;5;0mdies\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/dead \u001b[0m\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "3.6602350262062977\n", + "time cost 0.0883021354675293 s\n" + ] + } + ], + "source": [ + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "#text = [\"Last week I went to the theatre. I had very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "#text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "#text = [\"The journey was long and tired. We left London at five o'clock in the evening and spend eight hours in the train. We had been travelled for 3 hours after someone appeared selling food and drinks. It was darkness all the time we were crossing Wales, but we could see nothing through the windows. When we finally arrived at Holyhead nearly , everyone was slept. As soon as the train stopped, everybody come to life, grabbing their suitcases and rushing onto the platform.\"]\n", + "#text = [\"When I was little, Friday's night was our family game night. After supper, we would play card games of all sort in the sitting room. As the kid, I loved to watch cartoons,but no matter how many times I asked for watching them, my parents would not to let me.They would say to us that playing card games would help my brain. Still I unwilling to play the games for them sometimes. \"]\n", + "#text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. 
Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "#text = [\"During my last winter holiday, I went to the countryside with my father to visit my grandparents. I find a big change there. The first time I went there, they were living in a small house with dogs, ducks, and another animals. Last winter when I went here again, they had a big separate house to raise dozens of chicken. They also had a small pond which they raised fish. My grandpa said last summer they earned quite a lot by sell the fish. I felt happily that their life had improved. At the end of our trip,I told my father that I planned to return for every two years, but he agreed.\"]\n", + "# text = ['The problem is difficult than that one.']\n", + "#text = [\"It was Monday morning, and the writing class had just begin. Everyone was silent, wait to see who would be called upon to read his and her paragraph aloud. Some of us were confident and eagerly take part in the class activity, others were nervous and anxious. I had done myself homework but I was shy. I was afraid that to speak in front of a larger group of people. At that moment, I remembered that my father once said, 'The classroom is a place for learning and that include learning from the textbooks, and mistakes as well.' Immediate, I raised my hand.\"]\n", + "text = [\"He is dies.\"]\n", + "import time\n", + "time_start=time.time()\n", + "#text = [\"The play was very interesting.\"]\n", + "#text = [\"The question is easy than that one.\"]\n", + "#text =[\"The apple a eat by me. I had a very good seat. The play was very interesting.But I didn't enjoy it. A young man and a young woman were sitting behind me.They were talking loudly. I got very angry.\"]#因为外面有中括号,所以是二维的\n", + "analyze_text(text, show_firstk_probs=200)\n", + "#print(analyzed_cache)\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s')" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.',\n", + " 'Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. 
Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. _ left then _ left.\",\n", + " \"John came after Mary. They left in reverse order. _ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare\n", + " \"Though John is tall, Tom is taller than John. So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . \",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. 
_ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "config" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + " if a not in tokenizer.vocab:\n", + " ce\n", + " print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(2, 'fit into:large/small', False),\n", + " (4, 'thank:receive/give', False),\n", + " (6, 'call:successful available', True),\n", + " (8, 'ask:repeat answer', False),\n", + " (10, 'zoom by:fast/slow', False),\n", + " (12, 'vindicated/crushed:be the winner', False),\n", + " (14, 'lift:weak heavy', False),\n", + " (16, 'crash through:[hard]/[soft]', False),\n", + " (18, '[block]:short/tall', False),\n", + " (20, 'down to:top/bottom', False),\n", + " (22, 'beat:good/bad', False),\n", + " (24, 'roll off:anchored level', False),\n", + " (26, 'above/below', False),\n", + " (28, 'better/worse:study hard', False),\n", + " (30, 'after/before:far away', False),\n", + " (32, 'be upset with:buy from not work/sell not work', True),\n", + " (34, '?yell at comfort:upset', False),\n", + " (36, 'above/below:moved first', False),\n", + " (38, 'although/because', False),\n", + " (40, 'bully:punish rescue', False),\n", + " (42, 'pour:empty/full', False),\n", + " (44, 'know:nosy indiscreet', False),\n", + " (46, 'explain:convince/understand', True),\n", + " (48, '?know tell:so/because', True),\n", + " (50, 'beat:younger/older', False),\n", + " (56, 'clog:cleaned removed', True),\n", + " (58, '?immediately follow:short delayed', False),\n", + " (60, '?between:see see around', True),\n", + " (64, 'but/and', False),\n", + " (66, 'clean:put in the trash put in the drawer', False),\n", + " (68, 'because/but', False),\n", + " (70, 'out of:handy lighter', False),\n", + " (72, 'put:tall high', False),\n", + " (74, 'show:good famous', True),\n", + " (76, 'pay for:generous grateful', 
False),\n", + " (78, 'but', False),\n", + " (80, 'if', False),\n", + " (82, 'if', False),\n", + " (84, 'fool:get/lose', False),\n", + " (88, 'wait:impatient cautious', False),\n", + " (90, 'give birth:woman baby', True),\n", + " (92, '?stop normal/stop abnormal:strange', False),\n", + " (96, 'eat:hungry tasty', False),\n", + " (98, 'put ... into filled with ... :get in/get out', False),\n", + " (100, 'up:at the bottom/at the top', False),\n", + " (102, 'crash through:removed repaired', False),\n", + " (104, 'stab:taken to the police station taken to the hospital', False),\n", + " (106, 'hear ... humming and whistling:annoyed/annoying', True),\n", + " (108, 'see ... juggling watermelons:impressed/impressive', True),\n", + " (114, 'tell lies: truthful skeptical', True),\n", + " (130, 'but:disappointed', True),\n", + " (132, 'visit:invite come out/invite come in', True),\n", + " (134, 'take classes from:eager known to speak it fluently', False),\n", + " (138, 'cover:out gone', True),\n", + " (144, 'tuck:work sleep', True),\n", + " (150, 'influence:later/earlier', False),\n", + " (152, 'can not cut:thick small', False),\n", + " (154, 'attack:kill guard', False),\n", + " (156, 'attack:bold nervous', False),\n", + " (160, 'change:hard:easy', False),\n", + " (166, 'alive:is/was', False),\n", + " (168, 'infant:twelve years old twelve months old', False),\n", + " (170, 'better equipped and large:defeated/victorious', False),\n", + " (178, 'interview:persistent cooperative', False),\n", + " (186, 'be full of:minority/majority', False),\n", + " (188, 'like over:more/fewer', False),\n", + " (190, 'place on all:not enough/too many', True),\n", + " (192, 'stick:leave have', True),\n", + " (196, 'follow:admire/influence', True),\n", + " (198, 'fit through:wide/narrow', False),\n", + " (200, 'trade:dowdy/great', False),\n", + " (202, 'hire/hire oneself to:take care of', True),\n", + " (204, 'promise/order', False),\n", + " (208, 'mother:education place', True),\n", + " (210, 'knock:get an answer/answer', True),\n", + " (212, 'pay:receive/deliver', False),\n", + " (218, '?', False),\n", + " (220, 'say check:move take', False),\n", + " (222, '?', False),\n", + " (224, 'give a life:drive alone walk', False),\n", + " (226, 'pass the plate:full/hungry', False),\n", + " (228, 'pass:turn over turn next', False),\n", + " (232, 'stretch pat', True),\n", + " (234, 'accept share', False),\n", + " (236, 'speak:break silence break concentration', False),\n", + " (240, 'carry:leg ache leg dangle', True),\n", + " (242, 'carry:in arms in bassinet', False),\n", + " (244, 'hold:against chest against will', True),\n", + " (250, 'stop', False),\n", + " (252, 'even though/because/not', False),\n", + " (255, 'give:not hungry/hungry', False),\n", + " (259, 'ask for a favor:refuse/be refused`', False),\n", + " (261, 'cede:less popular/more popular', False),\n", + " (263, 'not pass although:see open/open', True),\n", + " (271, 'suspect regret', True)]" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "[(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in 
groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in e['sentence'] for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " 
_plot_attn(ax1, attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'BertSelfAttention' object has no attribute 'attention_probs'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m 
\u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__getattr__\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m 516\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 517\u001b[0m raise AttributeError(\"'{}' object has no attribute '{}'\".format(\n\u001b[0;32m--> 518\u001b[0;31m type(self).__name__, name))\n\u001b[0m\u001b[1;32m 519\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 520\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__setattr__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'BertSelfAttention' object has no attribute 'attention_probs'" + ] + } + ], + "source": [ + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled_linzhuo.ipynb b/Untitled_linzhuo.ipynb new file mode 100644 index 
00000000000000..9627f95eb0ef4b --- /dev/null +++ b/Untitled_linzhuo.ipynb @@ -0,0 +1,239 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/09/2019 14:00:34 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt\n", + "01/09/2019 14:00:34 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "01/09/2019 14:00:34 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "embedding_layer = model.bert.embeddings\n", + "layers = model.bert.encoder.layer" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "layer = layers[3]" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "BertSelfAttention(\n", + " (query): Linear(in_features=768, out_features=768, bias=True)\n", + " (key): Linear(in_features=768, out_features=768, bias=True)\n", + " (value): Linear(in_features=768, out_features=768, bias=True)\n", + " (dropout): Dropout(p=0.1)\n", + ")" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "layer.attention.self" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ 
+ { + "data": { + "text/plain": [ + "tensor([14460])" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "words = 'policeman'\n", + "tokens = tokenizer.tokenize(words)\n", + "assert len(tokens) == len(words.split()), tokens\n", + "input_ids = [tokenizer.vocab[token] for token in tokens]\n", + "input_ids = torch.tensor(input_ids, dtype=torch.long).to(device)\n", + "input_ids" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 768])" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "embedding_layer.average_position_embeddings = embedding_layer.position_embeddings.weight.mean(dim=0, keepdim=True)\n", + "\n", + "def embedding_forward(self, input_ids, token_type_ids=None): \n", + " if token_type_ids is None:\n", + " token_type_ids = torch.zeros_like(input_ids)\n", + " \n", + " word_embeddings = self.word_embeddings(input_ids)\n", + " position_embeddings = self.average_position_embeddings\n", + " token_type_embeddings = self.token_type_embeddings(token_type_ids)\n", + " \n", + " embeddings = word_embeddings + position_embeddings + token_type_embeddings\n", + " embeddings = self.LayerNorm(embeddings)\n", + " return embeddings\n", + "\n", + "embeddings = embedding_forward(embedding_layer, input_ids)\n", + "embeddings.size()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([30522, 768])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def embedding_get_all(self):\n", + " all_embeddings = self.word_embeddings.weight\n", + " token_type_ids = torch.zeros(all_embeddings.size(0), dtype=torch.long)\n", + " token_type_embeddings = self.token_type_embeddings(token_type_ids)\n", + " all_embeddings = all_embeddings + self.average_position_embeddings + token_type_embeddings\n", + " return all_embeddings\n", + "\n", + "all_embeddings = embedding_get_all(embedding_layer)\n", + "all_embeddings.size()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled_linzhuo_maskedlm.ipynb b/Untitled_linzhuo_maskedlm.ipynb new file mode 100644 index 00000000000000..20caa7bf8b9416 --- /dev/null +++ b/Untitled_linzhuo_maskedlm.ipynb @@ -0,0 +1,1082 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 76, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. 
To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [], + "source": [ + "# import seaborn as sns\n", + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/17/2019 18:31:04 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt\n", + "01/17/2019 18:31:04 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "01/17/2019 18:31:04 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "def convert_text_to_examples(text):\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line)\n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1)\n", + " text_b = m.group(2)\n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a)\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " 
input_type_ids = []\n", + " if append_special_tokens:\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(1)\n", + "\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens)\n", + " input_mask = [1] * len(input_ids)\n", + "\n", + " if ex_index < 5:\n", + "# logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n", + "# logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,\n", + " tokens=tokens,\n", + " input_ids=input_ids,\n", + " input_mask=input_mask,\n", + " input_type_ids=input_type_ids))\n", + " return features\n", + "\n", + "def copy_and_mask_feature(feature, masked_tokens=None):\n", + " import copy\n", + " tokens = feature.tokens\n", + " masked_positions = [tokens.index(t) for t in masked_tokens if t in tokens] \\\n", + " if masked_tokens is not None else range(len(tokens))\n", + " assert len(masked_positions) > 0\n", + " masked_feature_copies = []\n", + " for masked_pos in masked_positions:\n", + " feature_copy = copy.deepcopy(feature)\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, masked_positions" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": {}, + "outputs": [], + "source": [ + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20):\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret" + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 
'yellow_1'\n", + " elif gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " \n", + " avg_gap = 0.\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " gap = math.log(top_prob) - math.log(prob_)\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " print_token(tokens[i], suggestion, gap)\n", + " avg_gap += gap\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print(avg_gap)" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "metadata": {}, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=False, show_firstk_probs=20):\n", + " if text[0] in analyzed_cache:\n", + " features, mlm_probs = analyzed_cache[text[0]]\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " tokens = features[0].tokens\n", + " else:\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, masked_positions = copy_and_mask_feature(features[0], masked_tokens=masked_tokens)\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long)\n", + " input_ids = input_ids.to(device)\n", + " input_type_ids = input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "\n", + " tokens = features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size()\n", + " assert bsz == len(masked_positions)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(masked_positions), vocab_size)\n", + " for i, pos in enumerate(masked_positions):\n", + " reduced_mlm_probs[0, i] = mlm_probs[i, pos]\n", + " mlm_probs = reduced_mlm_probs\n", + " tokens = [tokens[i] for i in masked_positions]\n", + " \n", + " analyzed_cache[text[0]] = (features, mlm_probs)\n", + " \n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs)\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " return top_pairs" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + 
"/home/qsj/miniconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88\n", + " return f(*args, **kwds)\n" + ] + } + ], + "source": [ + "import pandas as pd" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "110300" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_train =pd.read_csv('/nas/xd/data/gan_prompt_remain_OPENAI_TOKENED_new2.txt',delimiter='\\t',header=None,quotechar='&')\n", + "len(df_train)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [], + "source": [ + "ss = [row[0]+' '+row[1] for row in df_train[[3,4]].values]" + ] + }, + { + "cell_type": "code", + "execution_count": 215, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"Why didn't the skeleton go to the dance? He had no-BODY to go with.\"" + ] + }, + "execution_count": 215, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sss=ss[22]\n", + "sss" + ] + }, + { + "cell_type": "code", + "execution_count": 229, + "metadata": {}, + "outputs": [], + "source": [ + "sss =\"why didn ' t the girls come to the party ? i had no - one to party with .\"" + ] + }, + { + "cell_type": "code", + "execution_count": 230, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/17/2019 19:40:30 - INFO - examples.extract_features - tokens: [CLS] why didn ' t the girls come to the party ? i had no - one to party with . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 3 | . 1 | the 1 | ) 1 | , 1 | \" \n", + " 100 | why \t*100 | why 0 | and 0 | but 0 | \" 0 | ' \n", + " 63 | didn \t* 63 | didn 15 | couldn 9 | wouldn 5 | don 3 | hadn \n", + " 100 | ' \t*100 | ' 0 | - 0 | , 0 | = 0 | ` \n", + " 100 | t \t*100 | t 0 | d 0 | s 0 | n 0 | ts \n", + " 69 | the \t* 69 | the 9 | other 6 | my 3 | these 2 | any \n", + " 32 | girls \t* 32 | girls 8 | boys 7 | guys 4 | police 4 | others \n", + " 73 | come \t* 73 | come 25 | go 0 | get 0 | stay 0 | came \n", + " 98 | to \t* 98 | to 1 | for 0 | into 0 | at 0 | after \n", + " 42 | the \t* 42 | the 40 | my 5 | this 3 | our 3 | that \n", + " 83 | party \t* 83 | party 3 | house 2 | club 2 | parties 1 | dance \n", + " 99 | ? \t* 99 | ? 0 | when 0 | . 0 | because 0 | , \n", + " 38 | i \t* 38 | i 20 | she 16 | they 10 | we 5 | he \n", + " 81 | had \t* 81 | had 17 | have 0 | was 0 | saw 0 | has \n", + " 100 | no \t*100 | no 0 | twenty 0 | number 0 | non 0 | zero \n", + " 88 | - \t* 88 | - 4 | other 2 | ' 1 | real 1 | . \n", + " 99 | one \t* 99 | one 0 | ones 0 | friends 0 | girls 0 | girl \n", + " 100 | to \t*100 | to 0 | i 0 | they 0 | a 0 | the \n", + " 15 | party \t* 15 | party 9 | be 8 | celebrate 8 | dance 7 | play \n", + " 97 | with \t* 97 | with 1 | to 1 | for 0 | tonight 0 | around \n", + " 96 | . \t* 96 | . 2 | ! 2 | ; 0 | ? 0 | | \n", + " 0 | [SEP] \t 5 | i 5 | \" 2 | and 2 | the 2 | . 
\n", + "\u001b[38;5;15m\u001b[48;5;0mwhy \u001b[0m\u001b[38;5;15m\u001b[48;5;0mdidn \u001b[0m\u001b[38;5;15m\u001b[48;5;0m' \u001b[0m\u001b[38;5;15m\u001b[48;5;0mt \u001b[0m\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\u001b[38;5;15m\u001b[48;5;0mgirls \u001b[0m\u001b[38;5;15m\u001b[48;5;0mcome \u001b[0m\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\u001b[38;5;15m\u001b[48;5;0mparty \u001b[0m\u001b[38;5;15m\u001b[48;5;0m? \u001b[0m\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\u001b[38;5;15m\u001b[48;5;0mhad \u001b[0m\u001b[38;5;15m\u001b[48;5;0mno \u001b[0m\u001b[38;5;15m\u001b[48;5;0m- \u001b[0m\u001b[38;5;15m\u001b[48;5;0mone \u001b[0m\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\u001b[38;5;15m\u001b[48;5;0mparty \u001b[0m\u001b[38;5;15m\u001b[48;5;0mwith \u001b[0m\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "0.0\n" + ] + } + ], + "source": [ + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "text = [\"I went to school by myself. I had no seat on the bus.\"]\n", + "# text = [\"I thought that John defeated Mary. I was wrong. _ beat _.\"]\n", + "# text = [\"Did John defeat Mary? No, _ beat _.\"]\n", + "# text = [\"That mary defeated John contradicts the fact that _ beat _.\"]\n", + "# text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. 
At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "\n", + "text = [sss] #\n", + "analyze_text(text, show_firstk_probs=100)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "def words2heads(attns, tokens, words):\n", + " positions = [tokens.index(word) for word in words]\n", + "\n", + " for layer in range(config.num_hidden_layers):\n", + " for head in range(config.num_attention_heads):\n", + " for pos_indices in [(0, 1), (1, 0)]:\n", + " from_pos, to_pos = positions[pos_indices[0]], positions[pos_indices[1]]\n", + " if attns[layer][head][from_pos].max(0)[1].item() == to_pos:\n", + " print('Layer %d, head %d: %s -> %s' % (layer, head, tokens[from_pos], tokens[to_pos]), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + "\n", + "def head2words(attns, tokens, layer, head):\n", + " for from_pos in range(len(tokens)):\n", + " to_pos = attns[layer][head][from_pos].max(0)[1].item()\n", + " from_word, to_word = tokens[from_pos], tokens[to_pos]\n", + " if from_word.isalpha() and to_word.isalpha():\n", + " print('%s @ %d -> %s @ %d' % (from_word, from_pos, to_word, to_pos), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " \n", + "special_tokens = ['[CLS]', '[SEP]']\n", + "\n", + "def get_salient_heads(attns, tokens, attn_thld=0.5):\n", + " for layer in range(config.num_hidden_layers):\n", + " for head in range(config.num_attention_heads):\n", + " pos_pairs = []\n", + " for from_pos in range(1, len(tokens) - 1): # skip [CLS] and [SEP]\n", + " top_attn, to_pos = attns[layer][head][from_pos].max(0)\n", + " top_attn, to_pos = top_attn.item(), to_pos.item()\n", + " from_word, to_word = tokens[from_pos], tokens[to_pos]\n", + "# if from_word.isalpha() and to_word.isalpha() and top_attn >= attn_thld:\n", + " if abs(from_pos - to_pos) <= 1:\n", + "# print('Layer %d, head %d: %s @ %d -> %s @ %d' % (layer, head, from_word, from_pos, to_word, to_pos), end='\\t')\n", + "# print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " pos_pairs.append((from_pos, to_pos))\n", + " \n", + " ratio = len(pos_pairs) / (len(tokens) - 2)\n", + " if ratio > 0.5:\n", + " print(ratio)\n", + " for from_pos, to_pos in pos_pairs:\n", + " print('Layer %d, head %d: %s @ %d -> %s @ %d' % (layer, head, tokens[from_pos], from_pos, tokens[to_pos], to_pos), end='\\t')\n", + " print(attns[layer][head][from_pos].topk(5)[0].data)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/10/2019 21:46:20 - INFO - examples.extract_features - tokens: [CLS] jim laughed because he was so happy . 
[SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "jim @ 1 -> jim @ 1\ttensor([0.7248, 0.0842, 0.0656, 0.0407, 0.0319], device='cuda:0')\n" + ] + } + ], + "source": [ + "# text, words = [\"The trophy doesn't fit into the brown suitcase because the it is too large.\"], ['fit', 'large']\n", + "# text, words = [\"Mary couldn't beat John in the match because he was too strong.\"], ['beat', 'strong']\n", + "text, words = [\"John is taller than Mary because he is older.\"], ['taller', 'older']\n", + "# text, words = [\"The red ball is heavier than the blue ball because the red ball is bigger.\"], ['heavier', 'bigger']\n", + "text, words = [\"Jim laughed because he was so happy.\"], ['cried', 'sad']\n", + "# text, words = [\"Jim ate the cake quickly because he was so hungry.\"], ['ate', 'hungry']\n", + "# text, words = [\"Jim drank the juice quickly because he was so thirsty.\"], ['drank', 'thirsty']\n", + "# text, words = [\"Tom's drawing hangs high. It is above Susan's drawing\"], ['high', 'above']\n", + "# text, words = [\"Tom's drawing hangs low. It is below Susan's drawing\"], ['low', 'below']\n", + "# text, words = [\"John is taller than Mary . Mary is shorter than John.\"], ['taller', 'shorter']\n", + "# text, words = [\"The drawing is above the cabinet. The cabinet is below the drawing\"], ['above', 'below']\n", + "# text, words = [\"Jim is very thin . He is not fat.\"], ['thin', 'fat']\n", + "\n", + "features = convert_examples_to_features(convert_text_to_examples(text), tokenizer, print_info=False)\n", + "input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long).to(device)\n", + "input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long).to(device)\n", + "mlm_logits, _ = model(input_ids, input_type_ids)\n", + "mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "tokens = features[0].tokens\n", + "# top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=100)\n", + "\n", + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "# plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)\n", + "\n", + "attns = hypo[attn_name]\n", + " \n", + "# words2heads(attns, tokens, words)\n", + "head2words(attns, tokens, 2, 10)\n", + "# get_salient_heads(attns, tokens, attn_thld=0.0)" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "0,2\t-1\n", + "0,3\t-1\n", + "0,10\t+1 动宾\n", + "1,1\t+1 动介\n", + "1,4\t-1\n", + "1,11\t0\n", + "2,0\t+1**\n", + "2,6\t0**\n", + "2,9\t+1**\n", + "3,5\t-1\n", + "7,4\t-1\n", + "11,8\t0\n" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQUAAAD8CAYAAAB+fLH0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJzsvVmMpUeWHvZFxH+XvDfvzX2pylpJFptksxdquskZS4Ks6ZEgzRiWbRgGJC+SX+bBMmADBox586sAA4YtGDBgwYYlQLAsyYsMQ/BgWjCgMaRZejTTPdPs4dossoqsqsyqXG/mXf6I8MNZIuK/t1jFUbenCFQABLMy7/2XWM7yne+cY2KMeD6ej+fj+ZBh/7gf4Pl4Pp6PZ2s8FwrPx/PxfBTjuVB4Pp6P56MYz4XC8/F8PB/FeC4Uno/n4/koxnOh8Hw8H89HMX4qQsEY8xeMMe8YY943xvzKT+Mez8fz8Xz8dIb5SfMUjDEOwLsA/hyAOwB+G8BfjjG+/RO90fPxfDwfP5Xx07AU3gTwfozxwxjjFMDfA/CXfgr3eT6ej+fjpzCqn8I19wB8kv37DoC3Pu8LbdOJXdMHcqPFAMZYxBDSr4zB51k2pnKAsYiz2YI/GiBG+j9APzc/0m4hThd8N/+MLZ8pv66xFtF7+rlyiLP6c69VvJMBvb/8P7/2omeIIfscirkz1gIx0q/4+8Y5erb8HRrfK57naZ45e8a5eVn0PXkGww+d3eex8/oFR34dmqeY5qDVmt8b2bOYyiH6kD7/pP3WbiNOp4//uxOda9Lct6r5fWEMLYXsg2xunmY9Pm/k3z/F4UGMcetJ3/lpCIWnGsaYXwbwywDQRQ9v4Ts0YXUNWIdqexP1vfuAczCtCsYYhMkEdrCMMDoHgpcL8YJWgHOA97Dba/APHwHBw3Q6sJ0O/MkJ3Poa4sUF3cNYwBrYq5eB0xEwmcCfnAHGw/Z6COfntFjO0XVDhGm3EEYjuJdfQLx7D2E8oWdzDuH8HMa14a5fQri/D7u1gfr2J4B1dBhm0+J5AcB0OoD3iN7DVC39jPnWVxF/54f8edC7GQvjLMzSEvzhof5N379VIXoPtzJEuBgDIW2kOJvC9gcIp6f8CwDWZNdvwbQqIAREH9KzWgcED9vtAtamOZFnNQZuMEC4GPO/+XmMBSIfLr4GDQdYPrDtNh2OGGCXlmB6S/CPjmD7PdrI3iNOJjBLS7D9Hup792E6HcRZDWMNrSGA6uoVhIePEKdTmE4HYTSCXe4jjEYq8KprVxAvLuAPHqa/ZaPauwx//4GuRxiP01rlUtM6uJUhzb91MHCIhu4bJ5NibavrVwFj4D+7jziZwC73+TMWZthBHE9gnKV78fd07QDEeobqyh7qT+6woOBHGNA62l4P4eICdmmJ9l6rTWtgXZofnns5V9+N//D2559KvsfTfOgLjrsArmb/vsK/K0aM8b+PMX4rxvitlunC9vuIgQ43bfCAau8y/TyZIEwmQIy0sSNtLNvt0gYEEENEnEwQ6xp+fx8AHSY7HMKfnNBm7rSBV15ArGvY/hIt0ukI/v4D+OMTmG+8AliHcHEhD4lY1/S5GGgzWQf/wW06ILzZ6dAaxHqGeHgE0++jvv0JbTIgHTIAdnkZAFDduAbjnB54WAO3ukIf+v47AAC3sQ77+iv0vKsrCJMJ/NERX8jpNd3GOm2CGBHHE8TplJ4lE0THv/hV/bypKph2G3ZpiSyreoZwfg67uYFYz2heaVJlrRDOz+n5YkwCYX2N7uV9eh5eD/1/DDo/CB4wFm5lCDscwvZ7tKbn5/AHD+HWVhBOT+FPT0mQ1jUJqtkMttdTrWyWlmA6HZiqQn3nbprnyYTWbzSC7XbhVlfo3rMZ/MFD+oxYgsbAtNpwwyHqu58i1jVMq0IYj2E6HbidbZhvvkbfB2D7fQCg/WdJUYlgtasryQI1BrAO9SefIh4dq3AOF2OAlUMcT2D7S+kw8Fz5kxNaN+9JuIQA02qna1uHyHvTrq3CLi8ngcCWCGKAXRmmuW+19Vw97fhpAI0VCGj8DkgY/DaAvxJj/OHjvjO06/EtfAcA4OQQpwuyJmwXh6s5bL9PB5XfJ79OLkURA/DW12B/713ApQ2Ua0iRxoXLYSzcMt0jek+Hm7V8nNW04a2Dbbf0EPuDh9lmsYV1I5terZJyEudN51zjZt/Xr1RVcU/jHNzONurP7qvFpKZufm3R7MHDVFW6pnWodrZQf3avuIepKtWk8nnZcKKdRJDLe8g7us2NYk7s0hLCeALbbiFMZ5lFAbitLRXuttstNGoxP8bQ9XkdRXk05862WwjjcbpWNp/F82d7Rz/Lz2CW+ypcYExyH8Qiipn7kz1nMa/yfhvrZM1m81gMecfC0kIxv7Kv3dYWEDz8o8P0MZ5bBK/v8934D38nxvgtPGH8xC2FGGMN4D8G8KsAfgTg73+eQAAAo1rGwMth5OEGg3SoM4nptrb036bTgV1fg11aYuuBrwMSCFY0cAxwG+sw33sb0Qc1h8nvDEnbzWawgwHcyjBp8uBJyDhyB8LpKQmIyQTGknaw3Q77hRb+EWl04xwtbm7dxAi7zJrn4oIPpknSPEbVTAAJPCOWhHVqwQAkwADa0LJh7fIyYCzqu5+mDcWuhbparNFII3m6n8m2Q/Co7+/rs1U3riHWtR5ONxyqhWCWlopNHWc17GAA2+kAxqhAiBMytcF+vlhbYTwmDdftwm2s04FhgQBAtbfsEdvp0LPL81sL02rToe/15vaX7ff0+fSQt1v6brGui+fXvSMal78XJ5lSipEtyKjzBV77OaFkrN5L9qx/dJiEkfdp7a1Lc8TXtf0+3HCo/47TKVlOrMT8/j4JhBh1v4WLC/rZOnqfzLJ80viJWwp/lDG0G/Et/DwAOsSmVdFLtdukmS7G5CexWVVIdt4kYTqjwym/b/ry2RAtJL6yXVtFODxCnNV8j6Dax7TadE0RSo35yi0Y02oT7nAxLqU7axXTbsM4C390PD8JDe0nP9Pi14QLsB+5EIDL3Ym1FdVCxfWBhGc0rALxQ8VqEkCseO9MS4n2t4MBwtnZ3DNVuzuECT3m/gu1PrDQP29+d07by7s35lwBYMaFClCwCZSK8I8hme/ZtUyb9gHYTI8hlvdDZp0aA7u8TK5crsyye+fzX/zcaiPWs3R/6+CW+8l6NkZxBB2yLrklmM2f29kGpjP86qO/9cdjKfyRRrZAcTYFQlDfW7SJabd1Y4pkNy36XfQhCQ3n1Jxyl3dUc+QaJPAEh/GYrt9ukW/W7ajED6MRLVaOB7C2yv088eXc2lrCHcSMtE4XJ04miwXComhI9nM4P1c/P1xckLbPv84CUgSiaVUIJ2cLr297PT1AhKv06fvBk9/J2jbOpmR6ynsYS+azNWq2hvNzEqpygIyB21inudhYR33/QXkY2DpwwyF9dmWoGlHckkKbZXvCVC16/sEAbmdb3QDTqtI9+IAmPISfaTCg/SJmfmOYqqXzEWdTwlrEEjGG15XxqlmdrC
kRCLmVe3JC72QswukpWQC8R1WLg6y7WNcJC8kwmTij53SrK2ohxmkpWMJ4Ur5E8Api6jqL6wHAP9hPIPNTjGdDKIAOlanYpGPzPE7o5eVAywY2rTbczrZK4VjPkvkVIsLhIUyrjfrjO2qWK3gIKFItJln90ce0eN6j2t0pzDr6gQ6HmM7GWXomYxIYx1ENt7NN/jwLJru0pM/uj47V8hCgrAiTAgqA0YsnEzmMx6QhRhd6AG2PzOLq0i4DjYE3b7JcZNhul/zudkvfL4xGyVoIXsEv2+3SRmNLTEx8+axefzBIGitGsk6MgX/4SAWoPIdptWF7PdJ4McIfn6hmc5sbqnkFLJS5IFeJwMFwegp//wGBiZOJam3ZE7pGMncxwp+NEoCrGyDqvmhq8nB+ru5MHmmwgwErjJDmZcE14X3CKkRRWZesAWMRzs7IIptMUF3ZUxdO56vTIeXhfXJtxAUBijXMXRICclP403Y7+mxzmMXnjGdGKPjDw3TAJpkkjBwKPD+njWMdYj2D33+YtFOMCJMJhcrqmUp9yyEqOXzql/E9omipNrkAprcE//AwTerSkvqctt9Pmq3d1sNgnIMbDnUzxrMRhcbEGuHP6YHiDS6xavUfRQDkgzchfdAQKLfUhala8I8OVdDV9/fpPZzTA0iYQQJpYa2a3Ybfi8zOdrr+6IJ+1+/pRpbvu60tuCFhLGpxbK8Xj6saCiDgkA9KnFE0JNdw1eVLcOtrfN8sktPpkAXAa6//Z81N8xXSRjcWbmsrheOqit4pcyv8yVk6tBoVibSfFrmEmUCTdQinp7p/CmHOWIg+p0trGOuaffwAt7tD8+cc7cvJJEVP8gMOqLBLoVFL6+6SglDBxFaIYGl2aUldHeVGfIHIA/AMCQUA6WUyFBjWwR/TAXNrKwnQiYEAvYbvLCaamMKmqmAv7cD2+2VUA6R5zNIS7GCZNoIjgWM7HTWRAY5sjEYUXrIO4WKspnIMEZ7jxgIihotxKdjmXtSoS6SaMXgCv2JMwKgAaYBqE+JD2ARg8rxIJCPOprBL3WKjx3qGcHFBB5XfW66XDhNzDwTFFt8aZBn4/X34M7IswsUFfe6H7+j8wDJZi4WNbbd0PcSEtv3kwtWffgZ/eEyHNITkjk2nOncCiIqFSA8fk39PC4DIgLFEUPS7wcM44rwAgFtd1f2hIVIebmc7XZIBRH90XAhqf3JC0SY5bO224hW22yWXhnEwvQcLsPrOXQ05kiWStHcM2RoD85aIYGWPOeR2MECMkUKUIngNuR1qRX6B8ewIBUap1YUALYJhbefW1jTk4oaMogsBxzo9EPlGdqsrsMt91B99nAgr2WRXV/YQRucIR8dEcNp/mCRxBrCF0YjcFTYN42xKFoFztNENEXsMk3CqSzt6OIrIgoJfFtWlXTpcDIgCSIdWhFe2yd3mBr2XRgDS0vmHj2Ba7TmhF6dT1WLVlT0Yy5pWTE7FPEIycwU3YV9bohvyOdPpkADs9+FeflGFpM75YKA8EzVlxf06S1iHcUQEIp95lg5rIcyYe+G9ClwSXAlwBOgQu9VVDZcSa5Lmx22sKfHJHx4mToGGEckS8hkGIlrXbW6oSa5zYKxem3x9izAaET41HpPbK88nURnen/J+pqr0HiJ01Nrg/YwY4TY31K01zqkL0bxmOD0lt/nsDMYaBX5tr5fA3qa78znjmREKln38uWiBsbTYZyMy58W8NQbu0m6yGgRVb7cLP2sR0i9ugYTsCNWniIH95mt0nVZb7wfrEM9Ghe8a65rZcedwL16nex2fwB88RDw9S34im3eywemVDOp79zlEGBKHgk1eRd8ZVIUxdH9AwcFYz8hc7PfhVleKeYvXLqef5b6OKdg5ss7WinGOLJ9AfzdVSxF4OcjGOdrkkwnidEYH4fYd+o78Z5nZKYAxA19N8ozgKf6QXDW7uqIHKcdBALEWbIo2IGEa0bNr5Ry5n2olJqwgnJymiAZAVoVGMToFLqSuD2tyf/CwsNZsR4BoIRVZEoSbG7rfcrITKbkKiEEVAKxDDFEFu223UF29XLyfCCJ/8FDdYX9ywkqipdeUdyLy2kzvL3hOGI1YmDy9QACeIaEAa4nJ1umoljKdDplrdQ17fY/Q9+U+/MEjmKpFFFBAtYlptRnJbtGGFLSXGXAy6WE8Sea3dZCwrKkqhN+jZE5F4AHaNJkFUr1wg9yGo2Myoz8g9qj5xiv0GeW5ZwKKAURTVarJ/NEx8SbEB2aT1r32sgoV1YYs+cNolAGgbG4KS0/ej836/Flml9YUDwGI/wFjEgv04SMCvwJpY9tusSVBW8QOBprXoQDvZKJ03uqFGwrChemMhJdgAwxaijUQZ3VC3zMBpdcGSmSeBY5ahowtRO8LQFNcMXXJYizMaaFB6+dnJDCFMwIkK8FUVQIo+VAFcQnVYqFnDkfHhekv1o+Mau8yzTtTjsn1TQCu//Q+WV/sctE9bcIueLjhkNzbjJsiwC5AQiuMx/ScIkDDY7gTnzOeOZ7C3BD+tuQJZJxuAAWrLrKZlR+aQvNwSC1xwktSlH5XTctF/H2+VEa7lcVRjABYCGDlI6HRjc8t+Lde93HPGXxiYcqzZteZY8w17mEHg8eHSxsxdTWh89wTGZ/3Ho/7LnMDhKFpOh11DwoW6pP26YI1mov5P+mzwgURjCILi8pemuOr8POJeZ/fL/+8HFgAj2eXPubZCj5Gvtbqjj5+r+U8mj82RuMfafCEq98NqKRT7cJJJQDSphf/Sv9vk7ks12y3gBg0ISUXEqqVhFgkklnMOAX5QjILWXOLaZgLoDiZqIazTQS7MRQsy+6naH8DSIqZhsoBKveVl+DWV8ls502ifAXx0dkFcRvr5IcyUzJH2MPpKfzRMardnfQZMZuNUZReAMOUs5EwEr1fvqY54NYwjXWuGwcgD6lKaFS1plhcGbvRdDosSLyCvzrHXbJEiKSWPV+D5GS73XmSUYwJT1ErYMYRGLZk2cITLoRxFqZqMTs0hSPd6iqR62Tv5ZaeMeV6y+G3RFNXQFT+b13CZjJcSbGifC+B9vhcNOUJ49kQCoASR2SiXJ7UwRvCHx3rIsoBAaBEp9x6kBh0GI1oU1/e1kNcbW/Cfv2V7N5MgRWTj33kFIJqqYWivp/8bKy6CW5nez6cBOhiuZ1t2mjse6trIaFY7+e1etPakUtWLfj3foxwepbMTg3TJXai+qePDslcZT8zckhM5sv2evCHR2rq26WuzrO4WnpwJAtP5r9fUosFMyhyM1qVgl8wWYq5uAl1nULGliyhwNaRxvsNHbo4ndLBY0Ecp1PYwaCIRLnhkDJme70ir0J5IhJaZWBZ5i9nCobT00Ss4r/7kxPaq5kG9hkZzq6uEB9E8IfgNdyeEtRsZo3GpEh6PXJZOGri7z9IVodz88qK30MEshsMir0kQGSc1V9IMDwzQgFAkZqbM7Dscj9Nnmz8XCLOZqoJAVB+QjsjAMWA8P5HhJx3O6gfHCC+82MAQPXCDToAcz6dSbkRDOKIL2hsFs5i4MkNBvD3H6TwElDGr
4NH4NTgHNF2q6vpliJ8RPDJyBJtZDOrNl1dgbmagEW5n+33i81ehOL4/4k7UVMtCQHjJhMCEuXfOfgbo3Lv5XmaNQVU4IhmZyEE8BqLO8CH2nY6lCzFFGHEMAfSKvNwNqX9IRR0iRiwEpHNL+FD026p9aS4CL/zwkS0/OeM/VlGIAj70AiCPEO/rzkbhVKwDu7FG/oMplVBsoEBAgTJTWwRVwRIew4gIFkiEBmfA8HT2TAWcTKBPxsVAkP2r2235i25zxnPlFBQAGU2pUVmjeFPzii1ms1zYe/ZwUBTimevXyeEvNtFnFEs3VQVbLulqHkYjShZp0s+ndvcQPj0HqXuHh4qr0GTjmQhMqkuZqXtdlN4bnUF/mxE2it4QqPZklAJLb4xD93kkrgiG4u1V74RhYMAgIG3BFRhOgOOTtVSUSuJ8xNy4SJaxX7lRRUcid14UR4KAIgZ4SsPraIkZQlYmQ9B8CX6oNdYMMJ0BtPpoNq7TEJSLBAmSWk24PpqOpD9vvrvxjnUd+7ynJA1YgcDxFlNYC67RZjNkCe+CQ+m2ruchPTWlq6B29pI79XYA/LuhhPkNDSbCQ/TanO6eYB/n5RQOD3l1PY6CVMBRc9GmQLKaN6c4q7ALZJ164+OS+wls6SltEAYj2nPPOV4JoSCcZY47Wdn6otLkRQA5EIspZg0/UC1FfzDR7C9Hqrf/BH8v/JVmOU+CQwWHnZzA/HVm+UN5aA7h/j6SwmsAWuFkOEEsqEzrjtRX0k6h/NzDVeKZPYHD0k7GQMjh71NYb763v3EwANSXD1G4Ou3SgZkDh5JuM054OUb9M5Xr8AfHsLv71NYancD7tqVlO+xMsTFz3+NJ9nCrlBmY3j/IxZ+hMG4tTU6dC/dJMGcRUz8yUk6JOtrsKsrcINBirtnB92triRKMwtOFZQ72+Rv93p6iKq9y7q+9d1PUd/9FP7wUPMjhCQlwjRPWxY3SHINAMJpYj1DnE6LuhtwVpF+U7WI3AVomK/+9DOKUDkH46we1pAT44xBtXeJDt03X1OsJtZ1So7KQ+NVC3j9Fru8Vq2KnEzk2JW0XabcG6LPq0v1J78Jyrg9BN78KuWbyNoyhdltbekcS60KCdXajXXizKBhuTxhPBNCIfoA8CEI4zGZWAwgqZDgVOQ5bcPkEbRasL/+A70OWi0ipWytIv7OD9XPNd0OUVZfuol4fgH77scKetlej/w6JgoBULRX7yuaJmfh+aBS2VQtXVT36q0UFt3ZUn6E399HJRwLNq8BIP4LCodWV/ZU4ysvYjpNYOeDQ2JNMs1Z3s3ef4T6w4/os+026nv30d3nVOfdbcQLTi1m7CKcnqK6tKuMUVN7xPGEQcIWTNWCW1sjevVggHgxBnygrNV2C9XVKyQsWBj4kzMODdO1w3Smrl48G8EOl8kCZGAybAwLHoAMNYMZr1FaNgC3MkR17QrNiVgpoin573KwJHU79hLQSJTrBsOPsQS73E80dwH6ALJW2m0NgZuP7irmVF3ZK2nZxjDeEeAeHKbIAfM8IIJMLAtWJiRgiP8hFvF4s61cEvzWD6GsUbClZh2wuQrcuq7v549PVOHUexuoP/1sHsx8wng2QpJmPb5lvlOE06RcV156C8B8OMeYoqCEfpez4my3m7IqqwruymXEYzLhYIlim7MdhQsgTMMmKl2EHTl2LsVNTKsNu74K/2AfdnkZxnBdh0ZoSwSBPGeRhvu48FIWqpJiJbbfhx0sI47H8EfHRVEQrcbDiHWeTu12tukZez2yFsaTIvkrDw/GGUUuwuEhzNISCdSNdYSTsxR+sw5ufTVpcnBMna05fS+ePzBoqM/Hf9fCMXk4NSMeAYxhZPPlVlcUgFYgkxPqHjeUtv45Q4BSzRfRSIpNiD5TnqvLl1Df/ZSEszFAq4UwOi/37qLQMwCJ0kgoVABVt7GOOBqlbNU8lLvgHexgkHJIJITK4V23u436kztfspAkoD6RoMpkslIo0Q4GqK5fpY3OG0RIMwBUO4mAsAxI1t/5GUqFvUXug13uIx6fwnTaBKSdndHm//bXyLS7doWQ48GAFnSWhI/t9ShHnnkO1c3rcMNlMsePT9XX9Td3CTAck2thl5YUKxFzUTAD0RA5XRsx0rvJv0UD3rpJQm1jXYuVxLpGff+BZl+awYCyJ7/+Sir3ZQzsUpfSqUWbPNgnC+tiDLu+mog4DO4p2Lm6AtvvITBoF0ZULCWcnCHOpjj/N99EdfUKHebzC7KMWLDCGqolmL2zGwxIEIaIs3/9DdKmmxuaKdgkZdler8iarPYup88w1uCPjpX1CICwJIlqZDTz6uZ13WpxOiNfPwcPAQ0lAlChKJarkOjibMprMGEmo0VkJSL1HYl27OGu7tHlqwr2668QiSkTLmJlxboGBHsSUPrkBHZ7k8HEZSW5CanKtNtpboGCQm6Xl/WsmDaR/DS8+hTjmbIURAsYa7ieHdXNs4NlxPOLhKbygWiagabV1mKYNIkt5X4Lw84fHav2MVVFVYOyFN76s/uqLUUriBQ3nQ7cpR34u1SiLNd0TY1W/G4Boaj53M3CrgVZS37XasPtbCGenAJLXU0jFnAznJ4xgLoJzKYAx9HD8UmBjYgWdKsrnEFI0RMAKqho0tgSWhlqWrRkotpOB9EHuI01hBOqHWBvXEX4gPCKMJlQRIbxFoDqUsb+EvyP3gdiQLW7g3B8oqYw4TC2jHY05sxtrCNejNXCKoqUZlZOronztWhaFoVFIDUJcotTwO0sSSuGCNvtIIwnlIw2Ok9l4DJ3UFLF3au34P/wfcIZWBDWDw50PfQdhZzExXTc9hb8/kNi8TYtzvzdGgVzcmxDLApTVfi12d/7klkKQpIBtAir5NcHMfdB2jY20HlJzhF/0b10E/7hI4QTLqv1rdcRLsbK2MujGOH0FG7vErHojIF545UkdGZTuLU12CEDQi9cQ337E7oPa1cpDpoLBEHs9XdqIi+YbmMKmq0m22SIvmIO3hMYd3KC8OiIhNobr1CM/uAh3KUdIEb4/X3E6Qzn375ByVJLS3pg3NoaZYb2ejQfkqx0MSZtZSzc2ppSfIUCrdwRvk4Yj2H7S6jvP6BD7T38ux/Qtfhwxek0+dvWITw4UIGAGOF3NxIqHqR2QFaCPScc8VAhAgLbNLoTqHaF27uUPpwJEw2JSoRAXEsprMogpLEpHV8iOBqC5bwFBE8WSauCPzym70oEKUYGPGuYHllr8eNPKQLGQt4fPKQw+FJXrai8/J7Z24VbZTe02wEubRfWExHtWmQ9i4BfW0t7Tbg+r72sods8VP6k8ewIhZzdlmUHCjPMvP4yAAKh4qzWyYSl4quyIWNdI372gPztIQOW79ymxd7ZTtmGkwlsv4/qhRsI9/cRZ1MKa33/ncRxB9V5qO/dJzLJOx+qdhEAUARNdeMaLdgbXy0KxuZ4hOVELiXtiB+dx/mF0q1f4tRcJqZIzoJd6pJ2vn2PAEfrqFhMp0NmPIDOP/5tuiRrmVjPtBp0nNWp/kGMcNubAL+3PzpCGF3AvfJieg8AZm21
sNDM2ioh5v0+VbziqtLu1guJBDVYJoHaJtKRZr2+/CLi7/6wQMW1SpH8my2HYpuEyDkA5H4V2a8hUFn9BhhNVZr4HTJch0J1KTvULpGQ8IfHGh0J5+dUP4NDxymJKmqJelVocl0GgMPpGZWgv6AKX7bfU2WkbQE4SiW5ErbbRfjoDlUX57od/p330/bguqBxNiUFxVaSlv1nl8mtDBFv3+V5/RLWUzCtSiseKZNRyTaUSYb3bqt5JUklbnODDvvmBuw3XuXPcwq2c6hv7JClcXoK027DP9iHP3hIhKXBAKbbQTw8TuQRjniI1Hdra8kauXmV8vKtFNbMog91TenZFxfA27SAeeaaPFfk+oru5RcT0OhTKFNGnEyUXyDEn3B6CvfiDZh1IjtpmjQj/G5jnawW5+D/8H0yP6U82qu3yPWQgjRnZ2zt1Cq44ukZwGFTAVjNqVRgvMdJAAAgAElEQVRVIqArHh6Rf753GabTQX37jvYUgPcKNPp3P6DQm/wuBtiN9ZTSbIwKcZkrenYpDy++fkbj5XRj4xxwZZfWRMKiEuefzljolglA8fWXNEJhOUSX/726vEv74+SE9haHF6cv0WcVtO5zbcS8JBsL1Won67EiroC1FLH4FmfeOpssFnY3REBE71HtbFFV8XqmrEa3upIwJkMFdAFuEZBZSTIP1dXLgLHwJ2e6r5Uw9pTjiZiCMeZ/BPCvAXgQY3ydf7cO4H8BcAPARwD+nRjjoaGg/H8D4BcBnAP4azHGf/Gkhxja9fiW/fMFukrVgoxq9DAaFWWx+eEKs0qyH+3KgFyOx5WEFzak0Gez4VaGRHgRbSSFUiXKcXGhBUk04SZDhuVZKYR3Qaa7HHy5PT/rXOKLPFsjYaja2Up58bEsd5+XiHdfeQnhw4+LpJ281L3tdiHZlkqgkXwNiQhkBWLFp3bDoWIN+jf2j5VlOiPuv5TKF8A4Ty5ya2upb0UzIsPPJzRy224Vfrr+P/P5H3edfB2KOco/0xA8hgvsKI7QxHmaaywp2HnCHpJAV0yF597tbCMy5oOM4Vm9cAP1hx+lOcpL23/9FZi7DyhVvVnlfHVVS8XpXs45NRm2IJjITzL68D8B+AuN3/0KgH8SY7wF4J/wvwHgLwK4xf/9MoD/7imuD0SULgOQOgRVFewWmfz+4SPS8Gy+S265W12BHQ7g1ldhX7pOeejabel1uLU1QrhBGqe6foXM22+9hurKXkqk4XRm2+8D7RYRpvgQWg6F2uVlDUlJkU0xz8hf7xLQ9tWb3FfiPJVx5/uH8QTu1s3id+nFE+POLi0Rd37/QBmQAGBvXEnXkpx96xB6nRSFWV4mLOXFq4S5bG6Q78sWi3Eu8TE0+5SfJ0/qYgKTmKlmQOXjTbsFe/MaCcDhAG53m1yIvV3S+pe2tVGJbEzTaRcFdKtLu1BiD0ACazCg/hrMiBS+gZKU8vnJivKab76m19GwMg9TVVrCXhPvxG1a7qfQc4xZuDukvIcYVdNKrUsVLuI+cMUoqfiFGOFfuU5l7NbWcPHGdSUfiaJzwyHqDwmYFVZlXtp+/9trMINlreko7E0AKeQaPDF0hV3J7QYEKHfDYZFh+zTjiUIhxvhPATTqheMvAfjb/PPfBvBvZL//O5HGbwBYNcZcwlMOBQ8N8d9Nqw184yuoP/oY1d5lktBnZ9yAZZpKY4WI+t59+IOH8D96D+FPfTO94LsfE+vv3n32tyzCgwPY118BfuMH8PceEMdAEONul/y4h4+IyANQeI1j1eH0NCU+NTanPzpCOD2jzfsbP1Cw1C4taahRko0IpW+pH56HqkRIhfFEwS0tsWYd/Hsfotrdgdva1HkzzgFvv49wfo76z/4JClF9/13y20cjCpsCmvTlT06oyAx/322sJybf2YiSzFpZss2rtwjYs1brBcx2U70B/2CfsJ279+CPjuE/+IiuLZTkySTNM79HXOWIR5ZzEccTqqshWX5cS1MSjERQagOYwYCE7h+8l5q9ZFaZ29pKeytQLkQOUku17JQc1uLvbRLgrXvSFvOn4WImrAlYWFinv/X7DPpO0fvRPXpmSfJqVZrrEesa4fiErtXpKGi48907hJFk99VqVHnkjcHaWM/IMg2eOSSBXKL/n2o07sQYP+Of7wFgR21hc9m9RRcwxvyyMeZ7xpjvzUChourGtSw7kjSw/YBYZPXdT1W6w9jCPMobyJhWG/af/T7c5gbcSzeBVuoR4VaospPd2oA9PmOJarUqsu33FEmWpBsYM5f5aAyF6Rz796lHY+QKRVzOi7n1wn6sb3+iCTG21yN25dmIaM8cFjOtCv6AwlVuuKyaTBqquFdf0jJbfv8A7rWXKX4/m8LtXUL1wg10f3SXojSzaVaQY6Zl2uFcqkbNc+YPjwm9ZywknJ3BPzokwWMM/I/eI//05JSe9cYVHN3qcjn3/YTU15Qi7Dh3xVSVam1TVVoG3XY7CO/9OEUNNtbJwukvaWKY8Abc9lZq0CIbXNYkIykZDpMC0KiQ39/X8HbuUrh1OnhCMU/JYTMCbT+7l0KpV/Yg9Tdsm1Kjq8u7pLgsJ8wZU1onrbbmloSLMfzdz2A31qkpz8Y6WaSDAdzqKrl9TMuWqJPl1oPIBGKh7QWwbrX1nY1zBKSKm5SFXBdGvh4z/qWBxkigxBcmO5S9JKmkdf3xXQWJpBaCFM80rbYmOWmWImucau8y3MsvqqSurl6Gf3QE/+HH8IfHGiL0X7kK2+3qZBvnYAYDIATy+dgyILDNagKRIN5gumg4Oka8GMO02KSXeoCbGzBLXfjTU0WuhXMvCxRG55TEEiPs+iqxE6X3ZU4wsVSwlqItTM3d3oDvt7WaUAwR4b2PKMTV6yEsd1H/+DbC4REVk+12y2IcnOtguP2droUkG4VAmI1GJLYQDo8U3Arn58opiHfvYf3tEQm1FZqbOKvh1teIK3LrWsFPsP1+lnVZs6Bkf77fJ3IVA2SaFzIawd16AfH4hA6mMXqNPHmNyE9Be34CRPcVsNofctr4RYp05NhUkULOmlbrKAIEFrICCVyKzt/f19A1YiReTFbTQNwR2+upNVh/cieFePl6/vgEeHSM6uqV1Ovk7IyqWu1dVrdPt0W3WxCR6D4MemeZkJpN2yhK9DTjjyoU7otbwP9/wL9/quayzWGM1TCWlj2rZxSWEaQfSHkFmghjqCnp4RH8ux/Arq0i/tw3EEfniD/3NbitDTz6a2+SHwyg+owyIW2nQ01FZ1Pc/XdvIXz9Foy1cLvbqHZ3tAR33j48XKQKvJL9KOBfrKkBajg+peQVYxEOHhYVggEQ71/AquU+6k8+VRO+urSrpiYAreyM4AmraLVhxlOEpUrLvlU7W5RE9TOvUBLQD/4QAJuaxsJw2zvhJ4g74u8/gBsOU5s96+Bf2iu6Lbudbfj7DxAmE9Q/vj2H6kfvgd/8fSBExKs7HD9vIV7dRRiP8eBnaM7N6y+jvv1JQu0NpZ6bdhvTP0PJWuHwMLEqs4I2sA7+3Q/gT09T6T3mk9hulzRv8PAP9tXMFy1L5c+iWgy
VvOTievFAEX0VcAcJ/9DHERmHZenNpQAo27eJYa+szMoLh0gUKLw0P4556kl2jEgBsvYuUCsI5GKliiYrmwugp85poyOiXEqA8P4S+dT7s2l4CHpx4nwJE7kImH2TQIluIs5whnTqVwx1LrkbIYvtfTEvRYlonwFmsFwkWaHkgcjvD0NTIIK8s0jzMzVER2+aI2Qa537qPa3FQFaN/tKkXatdpKoqo5lJMQNizOKx5Duh91luX5qONj05ydc/85gCMAvwxgI8ZYOudeBfBfxBi/7Jz7h/zznzjnClBH6tX4gAsN/FJ8pfhyij0PDsjVFVT4pJhIduq5OWIlri4Do3Gi2/IRVpYJU7CqzHVEuHweAFC9/X5KzUl+nnPCQkudSntu7G6IXMRzdEyl24B+RsAtydU369tlp51K2eXrkT5BmRml+idfQPv6Nsq33yVgdJvEV8PKMqRyszh9CvHwKKPJ6mmF5LS8BIzLvNQXYMCwzrwP8WSKjXWUd+6iuHge9Z0P1a3NVIh8UIp19izcM7FJARaVq8yba3TMkvPqwdwUF3zeGq6pGt0Y1wkVI0Mvt887Md/iZSCFUpbKLsS7sLyEev+AwoTHLsAdHqtHOjEPQkluzH/WnMYeZpy1C9fCPNX92NJsuZ+DQ/xe9VufjKfgnFt1zi3wzz0APwPg+wD+EYBf5I/9FeS9JP8K//yLAP6/BxkEAIRQsEsaR9zkZTwGPJUhiz6+Em8ESeYGsfXeHgGOd+4mTrs9ZKGMx/AL82khbO0Ad7eIVso0Z9/vwQVPXIJ2s2Q7p9nq7sZZkbAwj+ruh9piLCNcxcgsyBr18TAp7fCh32m1NVUFT2nYsLKMYBrXitBJWFyE/8NvUp8J5+BmZ/TZqs0tJa6Ut+6gvnI28eaLIjWECaxNOS6BM0n9B84nb+bgQDEPABoalVywVL7zHqXlmCaOaFrCtVsKXOq9CwGJW/z5uTk1/lrZadq8oa6Uuq3pS4BeLsZ/RNxWyVLi1bTa8M8/deLSU9ymrtQ7dO02io31pHbF2IsUJE1I+zdCFilhr+5tK++gfvcGZaS4UrKJS4jnRidljUbmfKSLJVzAGt7E5mRWaEOItt7fz3Gyjzg+0lNwzj0LAg4DyIj87zHGX3XOXQbwmwCWAHwTwL8VYxw657oA/lcALwC4B+AvxRjfftA1Bm4pvhy+RDqGzd4Owo0/qZVXutEJDEAtvnF/1YpKfCzMtCJpF9pdqhkj64Q27mOiL4XBNaz6zsQ9mmuF+cHUir6mp+T7fVJyMr0CZHeyO4sqKLFnIUVVxemNjPYNQGnUFtyyAi5aWGNi1fjqsyi+/756Zg+qxsuwjIagje1/oc9jdju7Myv93WBLrlWkqtPmPHHMP7HzmzVhMQrFlSzo2e9rxaX2yyhajAFs0ove6+XUd6PraXVBlWwnYZwA04IJDAao9g+yYrJMgMasP7lGVgxnx9wWT5Xjhy6IejSqJN1SfNl/Mb300wAV83+74+nAP/ME4g/ezEEdaRO2tozqrXcnFo3v91Dt3EdYXaX0I+eLZQFMHHxvxcY6pY/kRQGyF5O4DDNZ4w4XAvCZa4jffO3kikZ5ZmDC6CmwyuIdrihQ7ewoyFSc2iA9A1OVSG3Ih5pylHr9/MT5mNojPP4Yqjfe1nsRQAzsXcSjI+VIqPitMSxhYV5rCgS9B/c80P6N9lnNPJ+kyqRgpa3K5HurtrcncQIgGfFmhWU8WUlKfp7WhEiHjp9bgFQtGQe0PkO9lX6PvC6jDj4RJjZAUYRA1ZHimdgMggHjIUVXzTXDz1BcuoD61h38ztH/9umpktSMpXM5uOU8UZQlVcQ6+gpImhZe9Ws/pF89fS1pLh4ews0PiC0mBkcO7/QFqbbukas8LpUTYA8FG7mSrWSClIYXEiMeD6mSbrafqSC5EIjs9B26R+GxE+jVySdbQK1GCkldxdGIUp8M7NXHlP4rb91m9HuM4tQGL5ZaKw7juES1u6+gGwANH+z5BaCDD8kgCPnq6EgLyKqdHbj5gS486UVgj2rnfhpTIPVUlJDgqcchVacETLZSrCwGk7+X1YEACs4p52JnJ9/pkdxqquik34fFxYw+bjUeNTRF2nVr6yVapadWmzaBQ6odCVacx3kKf9kTjOMRsLJEil1Li5q9sAB2Jo7C1/e9rupm0i/jxIsvdTAyj1KRqocPKN99/6EVvIFHxChEfljNz7faCMzKKz+4RfX93NQjzPYTAjwu4frc0JVR8eq1H1IV4nBIL20RSFYMoHJTphbH0Yhyx9euYPSlFxF390gIY3GREGR5KZnBVx8eEtBnGHN2wsKZDfJemHIs11MyTiUEIQbwnnuSSDSyszAJxbUKeB4HWfSavwY4Di+5dLavhJWkityiTk/MgZd8vfPE5qt2doBOh15uURPiQ6i6goUIhVoMQz0ak7HrdFBcPE/8AvYawsICSdT1SGBWBWflpTO5fDEe1Ws/pE2grqi2ot1KuIKwDxuhlJ+by8uthUchPSmAlCmQOTTIezw6yp95pqfNi+vhMPtuWFmmNSkcAs5qCMuzMgzbrMEOd31Sj8gHxDub2khISvgltA3LS5TONf0nVWjHefheV9PbMh/Sc1VIZbLJ6NoTr0fm85PmKfyLPj6yFT2QqhBtQ1gbrzuWEuN8sfZCHI2SqAe7imF5ieXM2giLC6i2d1KKib+nFX1Si2Fy9GFjXWPy4uwZ1Pe2SZyk30tcg4vniW4qMbksRL5XJeaA3PyJHHsjvn2QkKpfXUa9Ra607WsohTpaui1doptZBsn/M4IP71XdmeokjugeWQZfRG+pyo8MVH08TEIpZg41NJuGN5hra9weayJnlUZOvuFq+26XQqheF9W9HZ13iws115F6A73eRDZFsJdw9hRL0PF1WLVqQijGzqVdp7xG/OwsGTiAeCiMgYgoj2ZYZmZIL3N7W9evxXGA5LFMZCsa/TBcCKrX0OTU0Hk+jZgCG4WJ2HYacUiUiAREadTR28ac9hzFmdNMAeZJldhT0nKrq5TFMCQhYAqAJuBcowFLuHYFbv8w1V9IxZ4segE966T03CRC2ftpXk8PMTQmhSc8/AnCjyUeWZ0Cvoc4GqWGuIYglS3KaSlSOz5GhFZ3+UYIMIETWaB2youvxCE+bwaAcqbDArgPajxrwWJ50ae+bBbPcS4jBVmykn7Gzo/BQmyaVQyVBTPD/EDxkLC4yGnLRiOZuqJUOmeQMrzjhLRy816a4/zpUnM2i17wAqVo8lGcOa2ume+YnpOGKhzmB+o+h/mBuqzScqy6c1el3Wz1mKra7O2hurdD37NEIfVGcqBH27/xM9Rvv4/y5gdwvR7C6jLRjbnyUDQLi/VVCikEkGOSi33mcOZUGhpOP9oWbqqkzAVHojANXpBhYZ7cTefVjQ+PUXGNbb5S7+0l6naMWXdqee4wGEzstvK8cr+aEnNeqdxqECTeZaOjR12RQtEUgwBABUckXJT7lsyRUolj5I2CvMOs4QuYXGTo6aKf2Vxvuquae7EbZrRpVWkFyAViGbEppMpEPzdH
6lBSKs4VnNX2toZU1fZ2Rpii87Cm53CUPDJbWCeboMVceH7C3Fw+llLs9TGOR8IoOPMSCqgYS3KzPDcxEeUdotcep0F0LjXolNytI2WgMD+gPP/qCrnH0l3IB2LSCf8BtIjr4ZA49gKE8d9dp4OwluTVwmMX9QVRBDxGyhM7Km+t7txFPDrKiVTOo7x1G2F+oIVJKmISU618ffuuMtLUCzB4RCaYwbl+ACT8WVeoD444jCDlHtduo37nfVI3Pjom7OTcGSoLZqCPSoUXM2DRz81pmCF1JmIE/cyMjitAC9L3ulT6K5J1wvGQObm3neJmEN9B8YuFhWSIOx0tjBMSWFY0Vle5HiTrLUith2u3qXZibk5DHzksJpEdMcmm2UbDTUxDSvtVWYnXjhRPSW0DrbFRwqP6vUxQ1xqmMBggsAFRgzQcwp3d0LDPGleaDyMWJMbBisS4XG7v4xyPTvjgv5jEUrqJDizuHyraFbNcvqD+5VhdvYkY1oc81gVSLAtk+eTsnLbgSV68xlgpd70phiouosh0c645rK5kKlD5yUJiR+7cP3EHzZhv9lmYBlsfH+dpS+EQ8DMV587SPTRdzGYKlgEwxJq8JxMKnRTSafhxAiaEWGtYFRYXUe3sIKytovpwiwqZQpjAO5qCpCdxIab+3nIR2MXO+mFKiMAv0gQ3Yvt+hv3kF0xhhIYAskaco8rcH7yVx/h8TEuf6s/i7rvUuUtCTL2/aWtCnocp6BPzC3xyjMY/ryPMD3Q3oPbkKYctYA8A88Kwkg6rD0nsF2XQjIJOHA6pgk+bgrS070ImUNqSHoKml2Awsa/sqNxVWsQ1lNIM6ORqJ2FmwcXhMNMZlInyzzyRxZD1/kG6xyI1PnFFkapCJX7l0ML3uvRdLniqDw/JW9lO7eflmcrrN4jFaFSjJkqS+XfEvd/K0nwyJ/qsyklYIBVurlMAoP0d6Tt1Fn5U24T5VHfuQsrR48Ehaw2kF67Z4l7P0djBFbhjRSY4l8Yf0F29lrXEL4wVepEXjoBf9hpMmf7UBj/8LCIQJHNX/eCtxKp86Wlde/J51+kgvvJMHiYL1iNaFGWJsLCgY6AiL9J2T9azeMACrIs3Js9ZV0nR6yGOR8YoQOTJFAcwt+YciYDKAmR5tGZDVDjp6xcJ/eUwwHU6KN+7ruIpfn4ui6VpF4tKQRWaKMXKZVqYdUXgnuEWqEsobhw/g3gp2UvM2IbcFwDgnev6neoeUbSloYjW6vN5MqnzGFVfwnExDXxSrbIiKMo9YGMUy5JEO7JUJ6XVinNndUjD8iK9JKI1YEIFeR5RbJYXSjoYAUTd1uI0dm2tInOGUQCpbiEm0VQbzgCW/pwX+VD24giu3YIIsuhYmwyOGhpz3swb4Rer3t8nibWdnclYXi9K86mp46JIisv2+PYbyomR1HMcDlF8711eH5SWFe0NbSLknLa4l9RzrCoGMY1OB+uZiseg12eD6IqGp/wRxyNjFGpxGaOpGfDUxJM65Rzp76rdXSaFMNLNk62CHGwcpOoQdUzy7pcu0I7GtRZhfS3RgE+vw19Mijaqo8gVjHR/Neqd+7qTA6TQ5Htddf/ls9I5Ww7hAUi1m3gnln8fq0qbgSihpSbFZqsyBEB1IePBAYrLF6mGY3U13SuPTZjtI148TbE2KyzX+/sU54oEe4ykIfDhZopRD4nOWx8fJ64HZz2Ki+ezOorIKVDxDmQHlNSfZ0NuCUeWDGSfyxoBkV6Xhiiu00Fx5rS61Oq18Wag2APLpsM54p0orb3IQyAfeFwM8MybwNGXn6fmPw2ANAwGSS+yJsm6sLSYpUNdq0B4+hpvEOPUMMi069M5CoF0McUD4//XP/E8k+NMGl5SjOORSgPKJilpdz0vA6dZFughjkcHU3BfUNeNgJVGPG1SK5azoLlfixs0YkklCdn0GMdfWusPMgJhY43k12R3acbw9piCQdhKxakUZok511ZR3bmbxYmKlzSvEWutiZdzCPouKUVF/Q8o/s29ihr+6WsqJqvU8IauRFheQr27T9WGwyGldqXEWMaax7g4ewb15lauzsTPqHF6syek0amYqK1o4DrTcAlAEP1WXnUpYwvkngVnaqgsf8p8NOax+fesnsToYEzDOiy2o+EgY00Z5uRYol40NnzyKkX7UcKXsLxIuhrNMZmyrpSnMOX5hNfxO/u/8enCFAAQSs4GISwu0s5zjaTSUVcpFvYSL8aUf54jGfPi1IYKffp+H2FjjcDAq5chZa+uKGjhHh3BdTuEwrPBqA3IJ8anOHtGY1UROYUPKNZX6fdPX0Fx7iwBex9+mGI8Ew7Bh1T9CKC6sJ5o0IwNiOUXz6E4taGTrPJmzz5BIc7envY+KC6cQ31/l1qdVzX84oI2RRVP5/Vf7pHOgZFjs8U0ADB8jj0bTn9qj8RY01zweQHqiSGduWUnDwNKCdfPXtXvwXmElRWeuw7qH3uOxur0uqLtfn6QeWLWU5CxF5afX1lCPJWyRq4ouNVcVGNpx3n83GMZvmR3aMB4g4a7AdCL5M5Rali1O2Tz4DoPuKSvqCEhv6Cqq+Ac3MoSaVvMzKih07ocQBXI43hE/VPbZISq7R3Gkjqc3empgS5Obeg1XJtUscIsMVzVo/SB+oQcH08WhD3geDQ8Bb+cGI0Na6vpyWmtvQT51jyvn7SUYrUZ7fadDurRGMX6KukxOpfviPb76nI2iFKyoOT37E1kZCbBEICJIpup93rSLmbuQ3e+o2ONnbOWevKskmo9Ok4pxUaNvd0dbQZGx3saWcaMvbAww5lTqO/tAOMxhEEq3IBmcVp9eKi7pPWQtMpwigx5xva049b82RwZcYoNGmKcyCJk89HwMOTvls2Yfcf8PC0rkBVSOYewsoJ4fKw6C+rpNElcdl00ajlObA3XIG2dlIn5dGUfhJxjB0RQ45pbrl06lz7bMvlck1tWhNUuEl744oE4rrcXZWhXtAAG7IrTG4kOKzsI8w/8zIx2eQ5LixA1XtEu9LOzWusAAOGpxzPkPDz+WAZm+V436TJKnMv3bgHApFHASDez3xSEtAVk0pj17oek38fxdbMtuo4LkBiRrJ84rbWb8C+ApDMpLnl5/QPyWqTtnPBMGlkDjfmVd3Ck5J5YxwSuydhLhqaudaeXvgt842atRPJImGeh48zjRvyNWs+roZe83KbWw4YsYWU5BxcFK3FO5911OnDciEaAV+VMCEgbiRgmzYOr7fuJfGXAW81ecG1K9qwxToYrDBprTwiVGUi8Ccs2fdjj0TAKgDL0lN1nqtfqw0PgXhJP8RfOaCyqbdcr09SkkcMNs30tXppQzOEuU6grqmHgzylGMBgkXj13ea627hHJqU4S5vXeXsJCIMU+UUlH1etvUZ2CLJqjY+Ve2BJhQccV/Oz1aOEJeUkIQdJ4lH/v+zMZQ1A6F7kWaSnKyyipNgHuRC7MtVv0Yp8kCc7uuaRM7UEudEvvr7YZGSCBeD6RbxBJdEaVkI33J2lm35+h9LSkna14iIQZYqwGs5M4A8ggit6lnkf0GDh7IBkbq48
IH1QWzxofEXi1m5FQkWNZUn8GwaxmZpK3UFM7OJKwD0BFoax6AWwcbbjYnHMZH9242IgIDmVVmOzv4D5eL8lHxihk6KykV6RNlg9AldIr0jiDGIwlCVzGmHZqs6v5mRmUT1O7dLsbZ3JV0gXp8JBoyRK+gNNqy0sJKOL0XrW5RfiE3UW574Kcszi1oTukn5mBO7VGWRTGRxSMUwA0cuyc4tT64JB2GBOP6n20iqQavbdHikgi3jocalv34vQGfcUg7ERp9qoMndD7o7R7SXagqhBffQ7F5YuaK9dxqysV3A0LC/RSj2ghirKyCojMD+gzII9F/j6R/+eX1QWSOs80MuXzpnsTQC8BYRqpF4NgQq7VzkugW23NxEgaWofVqDrB+YmSZvUg5XY5fJPPqcQ6NwhyP/IMS+OldSIp3Gr7fjK2otUJJJJVw7hqKpqB74xgJ38382arIz91KUnHL6m4sZTP9xmVVpl5xuIR3XSW1G45p4sYKY20vkYhQqtA68YWaTQ+y3qzbNWTWGdKbVr0X60u7xhUStzRdFu1dY+BR0+stvdv6uTHcYnyzoeZtzM6u6jxn7Z8Y4KNNKaRikPrAseqTo1a5D+fyEvWWMhCIsHOCH/5AsobN5X45LqdRFyKdfayWDIS6gr1SzResSzh/uRbiFxkpuIjtlWb83DzcwhLSa9A7jndXA0sL6SYtyioY5dRT9bzgfUYWK1JUpNwDn5hnvtj5lyWap/qGvw8UZCpNmUh5fXZuEr/BMIqxmkuvJGLX1km2rZgMmycfJ97VUg3LU7p2k0mPPFYmruvvwaMxkSNDyETcbUuPaW1e/os9fEx8MqzJNsXa/IqONMlhlW/e+5Muh4/m4Yjsj4auMuDjkcDaHRL8ZXuVzLVHgDq9tjyZ0tfViCyjqS5wHLc8kJpSkiEVYGJ82uKsFlZaf9u4vVmmsxWz2VqQa02wprpltygtNr0kyg9CQA1DbiyalN+Zkb59XbXOvqXP4fZP3oDCJ7OY0qQw/oaannJOh0CXA8PdddRiTfDnVeqLXMmLJXbtdrU9eiHb+ocqG6gGYupMnU2VWmAyxPpxKa0e0JFqTm2zbmzc9Q89UeUczc/N/EsPlCLwdEYKqhrT8XUbimSyrpWAyeDznL6bpc2x/mBNilSLZBej8NMqhGSEv8MCG1UTz5s6fQj4SnAOYr5rUUTIJENhTxolm/mMMP3uHdir5s8AEDlzsE7UHj8MbL2MzMIj1OqKjz1OH1WdBCE0WeyB7IT6u0a0o0wDi17LJ2HAAAgAElEQVSYB7CLyA1dXadDO7v2QSSZLQoPDhIYyozLadiI73XppahT9V5x4WwKc37qRfT+z6+i2tlBdfk0EXzm5hC4IzTRiaPG0NXenoqFulabqgfZwxBPolhf1dSbyKppHPzc49TRemEe9cvPIFw+D9dusUCNR1hf0/y4PpMTQk2dvBmebzUIHJ7ZMRB8qVkir/PRSgVj6u7zWnKdDrekc3mYAmRgaALkyIOCZ9DQOQVdLSVaQqdq534yrjpZ/MzXLpNRHo1QX0pMUcGRJCwq1leJhNXYzeOTj9Fnj4e6dqKIB/HaU+FaaeDDArfi7Wjo8TE2/4c2Cs654Jz7pnPuH/C/P7lekrxAdCGYHZxQfYPEG8KQAD/1wQEBPvukA+ie5i693lENxfICdY/64ZuEeg/mCLisK9RvvcdgnU9unaQ6axvfkisW1tdoAkyc51oEQsbRKKO8Krg3HJJLK2KdAm4CWV5ZdllVz+GFUFy+qL8Lg1lmI/ZZEKRGsbGO4o+/i/GXfoSM11e/g3p7B6Onz6H84DYVQp05DcSa1I69Kfc1141jymhoC7jbdxTZd62C0obcYcp99026x34f7p98C/HWXdQHB9w2nlmf7XaSYivL1JiVswl2FxN5M+H8K9biyYBanYaJ5SOaEEBeh8I8Fn2OfEEn/EDCORlzDuvi8VCNkhx+hhSz1ZPjo1hb0X8Lnb3+3hu6lt3r7yrQKufz0j9ze4e6nkvZu7So++ZrlDkJHqhTdgFVBb+ypB2rPFcD0wJJdPvq7ubJae4HHB/HU/iPQNLucnyivSSVdcaLEABN6tGRphqt4o2WCwuXnDn3cThEfO1NAp5WlglVf+s9VSSiSsxDasLaasPP9rXdeX18rAVDlsNvC6cqljX33RT/CsVXgLqm1wDnqGx4Sq2GphWnsPhkoZdvv0svx/wcqicu6GJ3IVBfwbubcK0C7T/8jnodfm4WnTdT16vyg1u0mLlQiyixM7mOpM35x0gl3nNzkI7Pmi2INdz8AMWlCwkMY4+iWFuh0IS7TOuzAkqKsv016e+E5ssc0VyPyfh7l4V+sRzrTmm9ibC+pjhKduqCmsk4o8EhWSvd+U1JsuJLxjOJ41FKY47LzBjI81nDIwC5a6U0o19bSWX5LCFX3vwg02IoTm+Q52GIRtXWPe0NqvdTlijfv5EM4fZ9Fceptu6lDFU5nqgveZjjoYyCc+4sgJ8H8D/xvx0+4V6S02rcxf20smVNyycpIgDY+qVXUZza0EksP7gFPzeLsLJM3H3vcfDFpzF89RoA2iH3fuIq7f51RHHhHIqL55X842dmVGpbhDNkgdn0I3xAfOoy1QGcPgW/ugw8c0V3QO0PaVzM+sefV0EMcUcFiFIWJ6AurCpC/em3tXpz+MUXaLeJNVyvq95WceYUMNOjZq+cww4LC0l8hY84GmkqN5alXte9wF21du5rm3f6Qk0A3OwshSLjEvHwCO/96qvAlYv00g5mgVZLnz2sLCdQjrGXejTG6Esvof4LL9AzGiMhLMkwN5fwov4MeTo+UKbpGbpP4QkAZKz9YxcUPNYMyfwAYW0lo8ALa1DHgfEQqs/g0CQEHP0rn0uGUlLVDfl8gDIWxakNXR9hQB2tJZwFgNhuob6/p1m1sL5KrMluh7xZ51Df20YYkLEFQOPDmJaER96wSOUoTq1nXm5YXKQNj72arLflQxwP6yn8NwD+FgAJ5pYB7MQYBaW5AYAhUJwBcJ3uO5YA7vPnTzyc94Tq+kALEuTGURoxppSh6ddIC32e4qaa5M6Wf/1PSMRkMEC9v08NVJYXEA8OUL53HfXhIXq//XW0fudrqG58gOLcWcy+vQscURORammA6sYHZMGLAn5pUXXvEohDPQqKjXUI9RZ1hfi179I97txH+e77hDoDigZX93a0NNn3+wj7I20AQ7l4cpnjcIj6rXdTrH1wSIvuzGmeMQKt/GCA3tffJcP39DXN0ceyJAWp928AgFaGum4H1e6+koRci/QppfLTd7vAJuXbw61N2tE5W5N22Ba9CFUFLMwBRUB9dIQzfzCC36MCtKPLSySO+sqzGipIuhAA/OwsfLeD9u98HVtPdyk1a2pPJGNR7e4mz7CqEI8JjK02NxG/8T1IIyARxHVFgXj9FoGxwtBkD005CQCFjFxaLnMh4LRlVMbRCHPfvEWGVDaBjE+RwNJ6OFQgMJZjVDv36TtXz+s8vvOvr+l1SN3bc+h7CNftIFy5BNQ161TSNW79aI+KqixPogHG+n
6fmv0Y6XgqS086lPXBFAD3AcfDNIP5lwB8Jcb4151zPwXgPwHw7+ITbjD74+4r9o8N0JGFTrnNWCaMAkynerbacN0OGYerl1G/c52MzHBI2APrJGisKhRlSzttHkwJFvQ/E0QR4dg6ZvGpZlRMLDlBIzafc0VBWv8MfE48ox0fxTrqpNdoCDBhhdroZQVB8ij9Ge1LIM8mgiKiKuRPb6B897qOh5+bI+O5vcMvM4USKjbz5FW4e+TKNu9VU6n9GaWUeyZMTXP5s6yA0M0tzVfOa/UKbRaigUvZwia9jow7r7ET15Xca1Mm3a5T25SY122xsU7Gwjf6gPB3baVnvbs7obNY/dSLCH/0rfy8huRFYawRjZlyb/aZP0mNxh8D8Bedc++COkJ9HsB/C2CBe0UC1HhWJIVuAjhH9+UKAPMAciVUAM0GsxJj+35f3TIB91yrSDTlEBLSK4y0xkRqcdPeHinRvHeDBmY4VOQ+jkcI58+qey0y2sU6leJaIgydOKXZRDhUOQ0CTLE7rvciE8OVmmFlOS0ybobqiiJlXoAkHW7R8+WlrJyc8uVE/nGtAuHsaYTZPhUWce09Vfjt5wumrvQ/vzCvUvZSOEWKzG1Fx2PX4A2eyq3R4tLdJ6l4ync6cL0u7XQf3AFa1IUqhT38XKIHwc/sBAQWV5zHWMZA1oPeNxKmpEIjgGJL+jln+BtyvsfPT34WoBCIKcSWnUqpQKo6VJ6JfWHlM9ZraBgjFwLQS+zcG3/tMypl16QeR2m5Z74PAMPFlhLPAKg0IY0Bay8cH08vjRYwt6nt8BDHRxqFGON/GmM8G2O8COAvgXpD/mV8kr0k6UJUYccpOuXPSzwHJKtnT2fYh9kCk1ZwLGtenD1DhsU0rC3fvY44GpPGwu4uUZUZ1BL3VV/GmOosYpmEV1xRpHQSLy6tDxA1KFEP5hcqrC4j3v5w4lxynWxYhkNUW/doV3YOYWkB9RFlXOKYDFT94RbgHcAhj5+hXUn+DhBOoAbTOZTXbyRZNhYFLdZW2Ftiifcbt1M2gEuny+s3aAy//QYBkKMx4v4BqjfeRhyNUV6/kUqDxWsrivRCiP7iZ65lwJm8yIr+2x3RjEtYmIe/emmitkKOpmBKmB8A331z0vv0BkzW2gavc4JYEzbA9Pus0tLMraw1/b7wWsAAMShNePq//idKEQ8bawo4AlQXIh207Ivf/+2vo97c0msrPds5w5xk42lqKOikeRp9Ahx9wPHPw1P4ZQB/0zn3Jggz+HX+/a8DWObf/00Av/JQZ3O52pJmIpijIOixylrZ1GRdIayuIly5BL+xloFFO//Oq+TCtVvwj1+mxX/uLIoLVGC195XPoHznPR003WX4njSGY09BKxB5gRHf/RhhYSGBXkxAknOGVQKVohgc4dSbBaQ1H5xqbWYifLsFV5COQPnTz+tOJSBSfXCE6s5dlDduJnDrlWcQf/Q5+vmbr6UFLi+rCpSOEFZWUN7dTIsvRiVIqTIV8wDC6grgHW7+x5+j8GFhHvf/8itkgJ++Rt2R+JmKi+cRy1JTsHF+FmF1Gf6t6zj8V1+GLVcGkPXfUGTeFCRVu/uovv8GpVYBZFRkeVlkLpmh6nvdiWfWtcMvlCgty5wixlzbQsbNzFnGATChi1Vzhg+IZ9Yx+vKPwLUKFBfOobxxUwHxJBHIPUuOjhS09MtLQKsF358xKtVJ8MczuCygLIAURjcIXeFjgI2PDKPx1d7PkxbAYJY44cbyhZVllLfv5Br5jYdWRh73KNR47colVG++MxFPiqa+Cl02S7YZk4hHRyeLrDQOoeLq+RqNOQDoAsras5swQ9lqXE+g2IEwC1mYVhZkcfEcqvdvZPeo2YSFeVS7+5y67KZKRjksNuEdNbm5+UHuDgvWcHQ0UbobnryK+u33gWeuIH7je7Qzm3FFCHDOEXegGf+rEW5lGIxzTlOX0gcTQBI86XQonch4iLJWpe17k7nIPBJpEmQBR43LTQPY7OWW3pAyN9IUqK6mMzANftIsqVaBGsZHwsICtd4TUeKG8E0YDODmZql1vb1fQM81gb2Y6/uZGVT7pgWie3hG4yNjFF52X5hcNCfdm3UFJfaWGNOEB3I0u/7aGnlVGTaToi69BfpYNZouksA5CXW0mOukezX/tgrCUym7DwA6VQPBUoobnbEBWtBEvzUG1LqQTXCvQQWeAPCa9+RZsXpnJ53vQXMm5z2JWszn0HFuXEtZkFO+b3PxU4FCezTo5g8al2nNZafpKegzNNaFnocb/0wbG0tfVwDadHp64NrKTnTC2Jux+3Q1g+EjlmPuRUAPGJaXNO+cf9AsQM+KRrGGu3Z5wnvw/X5eq89uYhgMtPlnrCOlGEGAktB/ARjSyogUndptohBz2OL7fWL6HR+rKyjuncTHSsF96nHaAYdDzd1nAJXEhta7EGBrZga+26HQ59Q6AgOivteD1OkXF84pmYswh2QQwrUrGq8G6ZNhX4baLGiei/B4KuzRWn/WenAtlo83casXFSI+JI0qoKPv9zN6r3/+qRQuGQC4aUSdT6W/9oW08bWyIO0zVLSja3jSYHJmegwma6V/HuXApKppm/NrRWa7nYUj2VHXCE9cmYjrw8K8GnbVsShLfd6wuqpra+rhnFGsokyEKmhraJOUyB/2eGSMgsTy2svPkbpwxiwEdCF4I3lW7e7SwvgBU29ZMKQ+OCB0fmmRBmx9TRdPfXSMGCMJni4vodqirj3Fxnq2e9g0UrVznzQcb36gmn1xOAQun+Xd4EBVheESjVdYf9X33wBAKLKTBqHyX4xKuxX02/d6abH2+wQc3tvG6NIa0YkB+MEcYozw66sYn1mCm5tjYVNupsvc+P0nlkj1Z2mR3Hl2gXWB2/iWS5OrN97OsBYh91g5c/KUyEtzTEcOT18jMZG7JAJb7+0hPPU41U+0SHKvOHsG+MHbBJDKGPDcuk4nGdhZAvu0C1KH5PPUOISQ9CnZCKpitKQkB0nQhCpR26l+QdYUn8t6R/4zj+vcF5cvTr6cMSbyF1dlIhJ/Ris7AcTRGNX338gqNQEAp9Y0mxQPD0mCUBrLssdb7+2lZkfGqAimJG0GhPLs5FljnfAvl9oXPMzxSIQP82ElvtL5OdP5yWcZCO0fadORYv1sn8G5OdIrcB4ueLh2G9Xenr5s0/LM2TllpxFMgQkmtsmKtg/zDn4wUM76RFVdU0TTcC4yb4abwU40PRG30rjxUjVHKb9ZVJtb1B/z7iaRv1jwQ0OHqoLvz9CYcBWd3kszlm66nza2lv6N0t9RQhhWopa4u9hYR3nnbsJzrl4mw8JHWJgHWm3SsWz2ZjyhUtCGd81GLtnu7l0635T5CBvrZEibPAa7FpgRqkaocY1mek/WxUmxvVzbLy4SDgSol5XhWNlJ05p+YOUokDxm+bn5MaFzMx70u+Pf/PSED5EVlV2nQ+2+2i1CbY2YRBLLNO5qu2UacBRw3S6OfvZFuGuXuS6esIbNf/MFqv579gkUly7oziLexvhnXoJwFcC9HeN4BH9qnaw6pxKJBEQvm
6QKhaEXFheJosrIcVhayAU6YsTuv/GyvlBhMKBdVLwJWVRm56bFRffju13EqqZ7GI0QWWot7u3TIts/gDOuZFhZYjIVs/VeeiKxImUHXaK6/LC8hLC2ivDk1czNHP78Z8lwiBJ2TZwR3+8rUUv7bLTaOHrmLPEVnr2qAK+GgABwZgObP38FYXUV93/hBWKkLiwk+rpzSs+VsYhVpVWKrt2Gv3iOwhr7EsSayutNMRNAXsX+v/YycT8+3IRUP8oYZPRf5+EHAwq7eE60HymQ77TsngsgKPfq5+Y4VPQI167g3r/3KmJV4d6XH1OmLtVuUCZJMi+u08HhL7wMOIfi9Iauy72vfAbh6WupPkUUuGI0Iq5RPTU5inNnU+EWK2Zn8nofcTwSnkLWddqyAOV4wC5mrbSg7RNAkjmPZhsAosrOz6Uu0+Y62hpdFusU0UzAgFHNa9nzWYCQpeiVA286PecnTt8BoBWGYW0VcZ/z2uJN8A5erK2g2t7J0GxpRqty6rxDNwG9aXX4CpABuqtJyzPhHVT3d8lQdDqo9g+0o/I0DQPxtMTN991OyjBM2eEzQHmKBsO0MbNduCX+pzHezL7bBPiax4mg97R5Ns9nWwmEwSw1c5mdRXziItz33iaPy+pu8DqwHa7DYIBq/wD+qauIb7xDmQRuSiufnWAxnrD+rLDxp0tPAQngkeq/TGJKXlST+6cvMa1XftdqExjY6egLGFZXEdbXMPzKZzUGs8BSdW9Hr21zufXBQYpNgakLBwDxIhgMk1oI2xGZTkaL3j91VTtch9UV+sih0R20XAm9kUpj4fDEFUTRSHQO4fR6GrNWwWnbBQQGTV1RwM0PSKZeANx6OqAXy1K1JcC1EMJX8CIVDxitx1T+HZYWSbau3SKDsbyUdfUGqAlPfXgIPHuVCV/LSpPOntUCYjLvWuLsEZ68iuLsGUrZGTn8ZjWg6GoUF8+lFKkBfbUVW5nL3JO6Vlcxlow3YkHEJhnIOcWSCCyuaYNyHvXTlxD/2XcUWxKMQIFL5xGrWhvbUE1Mjfq7PyCPdHubDKpkxLzPsmX2UGl/GQejPv6wxyNjFCzQFeskqlKcO43i1AZVJj7LVXwhKCfA+RQfYzhEPLNOk1xXVCzDFXi9P3gN8EHde/IaunAvPIE3/vaLJBYyHGLzr72a7qminZEEWSYXgWu1Ud34gAqvmBgUq5qANdPRWjyU2GM3L0bsv3QeymlgEEkQbG3Cur6G4vJFtfj12+9rNWRYXkK1RP0StLM2qJNznJ9FsbEOP9tHeZ1KbMP/fEw7liwSC3Lys735b3PB1sI80Gqpa2ylyqUkvD4gUBV1RcZofQ1+cYFe1F6PQDTQog2PP4bqxi0uXgP8Yxfx4VceQ/nTz9McX7qgIYg1xOIu1yy7h7pCPdPG9o+fI3B5PCKCD28OWvthMij3X1iHe+EJve9waj0LMeiXQY1MtX0f9WgM3+1i7/NPTEjbazaM/y2grmAArk1VtGFuDuHKRYSlBWx9pq/XoTH01Dv1+JjGrdelepfNTepb8eRVHTutvuRUugC1ChLLBlqLMlVJ6142ukVSuJ6mQ3HS8eiED+FLU11H3++n1mMNYKfpAheXL1IBD5ATUCQMkH83GmNYV3GiaIavIR14pLBFYma5z+LcWaIAA0klR2Tk6oiwOE9KyhXVGBTnTqN8/0buvleVekmS4szCIecQ5uYIh6iJdh1WVqjlniHnZCg6H8XGOuq9/fzZjXvp+32MP3sN4Q++QX/zAeHyeRXJlRDF92dS1R1nMNxMD/X9PQUE7UsUVpYRmZYthUFupofIRWnV7i7XXsQJYFXHRYhAR0cozpzG8Oo6wj/6RpZ2k34ezZ28OHsG1e27EE2M4szpic7fUu+RFV05h9GXXkL7H34tfXDa2ErIYoFlS2KqKhr71QXU3/4BmocAzRk/wnTPyjg2TTk6ASWBifDadssWmb+H5Sk8OkbBfxEAklaiyUC4TifpGK6uotrczJhwMjh+dpbwCF5gmQGwuIBhtRWXL6K6eStLeenPNs6WhSPYAOssxKoifciKCqL0JQXSi2qxCmluYrIqlv3oOh2VfgeQ2oEJIm0ReL4PSRVW93fTuBmylcUtbAYhrK6qKEeTxDVx2LhaFr0p7MmA0po6aFebWyfG7AAUPPVLC6jufpiM+6kNVB9uprmymagTsJ0TMwBNXMd8NiN4OacswyZmoRqfMaI4taGVj3pfYhAaWYGwtpo9l703EvvJDXSYHyByVSulpLvZ+p3IuFkPRubVGAlda3x/nz7yEsf5InvtWoWiz1rLDlCrec5IpEmoFbHVfDGQ94+MUeNH30+CJuXb7yblJEBTjqp+LDGo4UvYhRkW51OhFbulkenEtjBHzqlKzTG96DatGodDpfnKPVtCU23PC2hdgVZWyjgZzKC6t6PhjpYKA5omo3CCefXPP5XmxIYX5hmSVBqNS/Yy8vXFCE28qOaQng7Vh1skngIyvuWt20m5SioJJRNVJg1O8nQK/bvvdk1BXOoyDSC1CpB78kHbs2k4wJ6CPruM0+6+voT19k72HNnzcSbAFaS4pDUu5lAq/MFBTjSKURWmtGrVpiPFsDR7c8qfuX+GKjg7lzpwf4xGMMAjYhRcq6U1DvQLViOSdvLykCzzNXEwgCjAmAIt/JJY8CVWhDXE8Uj7EwBkQCSHHAYDRLbKcThMkto+tV6Xc2n9AvcPiA0WnD3q45TNmOhlwAy9LH/vk4ipZa7pd7ztANTI1sg5xVBECje8iMhKxST/zQ8GNCbffysH1cQgMSNRpOvE+Cnh6GFkvxqVhtqxqd2iojLpAA4kb60ZC7u8HNi+lDVzM/RvTDATYFGl0VR0lSnaYsh27icAr9edBBdj5NoaGtNMkt6wHG2DHwmB9R6l2Y5zBvROu3t9eJhLtDfm1OIDKiPH60QMnXxOC7oa4/5RxyNhFKQst9rcIg+BLWksSUpKAC9VdZtG2PAhMcjGuXtphT0V3AOIWWgGXtKFBGKV6h4Kyg5AZbkA4iIASBVurDMo95NObF5S3nWzXoviEUTDauTdQphqdoeXc8Y6wg9mlfOQ/sa19BK6AElDkj0N8UBkzKR3huAg9hmIYbiQfi87lHgQMfWCsPOhHY7ksBRusDfjqMN1vb+P6s5dAlbl2p5xHG61nj2jTzuinT/9G7/Q0pFJCXAF9dXIslt8jrAwn9HE5dkk/WrZksJmzbQRNPOQSvhdQaK+ExmlGKeHQS1DoLIVj+b5pRJUO2l1O+oRqDqZGDRzTw97PBJGIZaV6gXIC00xcUmLhT0AYRXKoteBF0l38SJsfOcSb95zzz+NzY+OIL0qAWgLNRlIinkN+FlXKiUP5C6ypJYkUxKuXDQPGPVzspAzbINde1cUxMCUHRlpx8w4/0g7aLV1j9Dobid7aVy7Tci2IPjHjepBHj8ZM1uDoPepoQjLlFt3toGTwLm8C5MwKA2QGoTaDeQitnxdVxSoP7hNNRy9rqb2pG37xOJm/oLv97mzV8w8I6VfF0Xm3VGKt8xSrQCFCc2+DgC4
xsPxi88aCMy+Fdo0zWtal1J3ATBXRARbmjUIZjxEHkDXhpCTJCOj4GLynmV+BDPLsIYp3uPDHI+EUQBA9fHGlfKygGz4wOkYtbSya7M7a9lzyvKKVILr5+YmshiyQGyPQuFJAFBUWCd3cRHEnhsTMMQxofZzaBUob90mb+P1t+h3HfOy6g7m9b5lQQlTUrykLP61h5F08+0WSaTN9vMy4/4MUFWo7tzVcmlRlKKxKbKdyhUF/GA2c0PpDwmzEc8tOyT0Eil0Nr6WVJU+S1qN2mFLDja+fmYG0tOivP4B4GmMivNnE6iXGRLzgh4coLq/O5GTl/WRqTbbF4XVsMQ7a4Yqlmuh9RXl2PT98JnRA6DEIiEyxbLUWhPFh+wha3k4RKZvLKGbDwnbkt/rAxrOidyzbJpI86wG4yGPR8YoRNnJ+IGsVLWAWwoKAZB0GrgDDwDUu/tpR7CfBZJqDXsc+cUFnU6luWr9fVKTVqEQLggS4VVlCFr5b3b7FAAS7gSQxeq2HHtaBZ8aJOlHaOiq9fEx6v19xNGY9BCkn+HRUZ71cEbAVIypjiE9b3VvG0H6K/C1w/xAF5b0p6B/J0DOPpfta6H/NoaF7nmotQVC8a2PjzkFV+t41fv7CPMDMhACNjbCQh1HmcPmwX/XcWkwAONwiGpzK9XcSAgg8350rPdfbW4lT0pDUbmvkaY0ZW0Jj0NfTBumGGq0HGF5aaJxjogCy71OC3nUo+Dns71AldrMWbKHPR4doyDWLkNXeScaGuabzQBUFZGCtHilTrt2YzeSyj/xJKRTlJT1uha5mF7kyMtx2s07HZaIZ31Errj0rHhD3gDHmr2eSnRLhoPOVyp45IrCIOheYz/bOVtKlEU3URZlsDu+GK6ax0YyNgarkGtmWglFKzFIVQ/Ak5Ez+IdbmFfCTRyTxoR0toplqeXmANJYFEU+9lbiTLw6uUaHdCCKjXXddZPhiah2duB7XRQbREjTeF76SvIhWpPpZkJWvq6akdYDkhCC60qysInXjPSrnAaiqkajxbfkuwIkHh2nLNVsXz2panc33yQASkUeHJAxmJsjslpD1EXnSsIjLt8XD6jJbsw2oU8bppAdumsn1BsxkhaAACegRRZm+0l+K8asEtDPzGRFLNoshnfm+uiImF8G+AOQsgmaAorEDHz3fW2KIruwH8wpP0F3uKMj4Ih6UfjZfra7+eXU1UeMTphNyLQF7Ci2HKd75oVWfnA7xaGtgrQnj49pV6iq7Jn93FzGkdcxtIQX9miKM6ey545licgvbVhbJU7H6Y3Uit5RAxTZgYTQFEujX2li9qaHAedQ71Fdhci30z8S54GMV4F6fSkVRRVFXlPhA+LBoRpARGo2I95dHJf0ch4Ptcu1zq1zcH0qeFMjbb3Lo+NJbguPV80hpAVdMz0Fg8m4VpvAa3PfWYGSD6pDKl3AtTK23eJz1tn6z4ygyMhxZ/YJkLuJY3zE8WiRlyQ+M9LbpJE/WfY7cbhEqLHqS67VTt2DjTttC0Wa55OY2JKfBDzKUoInFc2Y6yiZyLVB+z4AACAASURBVMqJN7ImzTjTz8woRqDsNvme3HtTYWrKc2TknI84JgqEhKlXJTT9pPJg+xz6OfP3CbUioSRbYthHKG5ZabT8xll5ajwlXpe/jSjLYZ8new4g+52Of4MkNFE+LbF6wxPLbsGMJ4AJAlX2fOwpNN+DjKjHa0rHr3FeOplhPNZp0/x0kpfkRwb4wvwgpeMaqRlxrwGkclIpGrF5Y+YjiAsYlpdoRzakleLcWY2v6Usc/wp3gScwzM1pnBg4ry/qTrJoinNnqaiGU31i6RWJN4aDgNGksAwgVdoZ5D8DW9ttbpKTXGJJN8ohvAYRmYFzJB7b76e0m4ybYB9GqVp2eAtU2fRmc76yaazM90AvvNRl6PmDyacD2hTV4hea9r12BWFlWfuFyhilZ20l3EYBQPOii4x7XeUGoZGpsZ4CsWIrukdeI6KSZDkH4tnFcZk4Njyuyjvp9dR7sF5hNL0hJdNmx9V3u4kmL3wLfaYqhaCMm0yGbSacNunMhzketm3cu8657zjn/sw59zX+3ZJz7nedc2/w/xf5984593e5wey3nXMvPtQ1eIFatlq9T12HguSp5WHrKrnXgLpeMpn0D3qxXYtarIVV6nEo4YErikRQKUuEhQV16ZT5yAVEMtjV7i6h8FWlfRJjWRKo1O3CL8yj3rpHacKj41SnwEdYmIdkK4r1NboXMWL88mUsTCA9o8ThIaQy4PEY0hPDtdraFs612yoYSj0qWjh+7jz3gzxK9SOSaeHrVz/9IveMIDe7Pj7OWIFieAVHUcNn2/k5r9JimireT9kU8eT80qI+o7r61vDwDle9/hbxVzjsCutrcJJaBvIdlEPJrEdGv4dw+byOneVOKK5iAcza9H/odZUgVu3uq5qXnBs+qT7Z8ns9X4yo9/ZQXDzPWZ3EHdBwjZ9BwOriovSooPYCYiyaPJCaQwy7MWqI5QWEjxMb3cMcH8dT+OkY4/PG/fgVAL8fY7wK4PeRpNx/DsBV/u+vAvjvH+bkcTikmNfEu5GLh6p9wz/IXD+fqgpdgyUoHkWbekmUt27TouLyVNfraW17eedDWrh1lSo1y1LjOFFfoi/m+d44LrWAqdrcJM5Ap0MFUKXRDQS0NiEOh4izqcot29lszG+Q8OL8WW6KOtQKOUk3iltbPX4ezhG9VTwieNppem9+qGMqz6chEV+/fXuP7m3/IH/pGAl3LVbxKUg/Qe7Xr+ZdAaOUV49H1E/RGOtYUqZE+muIF6U7a9FKGZYYCc8A9Hfx/i7VlvA6CCvL6qkJUKiufF0BdUT11rv6zIkZWqcCNO8SbsPXJW5DycAqpbxVs1FqbbqdLEumB4OYxYVzhL28dwOxjiSEw0Sq+t62bjgKRo9HqG/fhZC28Ozjer1YN7JmJh1JIeZxEicqS523ZkjyMMc/T/hgG8n+BvIGs/9LpONPQZ2kTk07wcRR5wU5QsnVcMC6fGYg6YeYuhwDtEu0W/Bzsymtd+4MyqunNVYjZR2vC0RicEWPhdUG2jVl8cluCUAR8TAY0CJeX0M9HFIJLocBUpdhFYawswuhCksHJoDFTs3OLC5i+d51MkDewc32M8zEBTKO7ps/JG/m6BjV9n3+G6drt7YTCIYEQqpWAUBy7QCX47ayOgKRzqd4+xiuP6Mxfv3hVvL0GvlwytHnKH2sKuqRyanmsDCfXugyXQcA4v5BRi2vj48J/LQ7tGwAU8hB1e4uqUEJEcmqT/PLmyk2cTjl5wfwgzniP0g6z6aWY0xeXWNtijdQvnc9hQP9mUSBHo8IxBTmImNNfmYGUVLTsUb8+vdSiGB2ep0Xyc4dHtLnDbiZ3QvwscKHhwIanXPvgNrNRwD/Q4zx7znndmKMC/x3B2o/v+Cc+wcAfi3G+I/5b78P4JdjjF9rnDPrJfkXWn8x7V626uyE+7P6dVK5NlFaigaIZf6dyXd705NRMgtifW2lGn/W8ieUGCP17sFPl28XgKghDpr1L5wCOmZj1qH
2c1N1+8w4ZOeyYyguPN/DBMAnRxMw9Xkp7rQ5SDc5Zc6awFfj3BlA2fjbhJKSuVerVjRtrLRqsPk8Ex824J4powaQQF1g8t7Nz1OrQSWUEUm55hh8xJGBtnIY4HPaM1lAPNP5rCr8XvVbnyjQ+OMxxhdBocHfcM79hP0jt4X7WGmMZi9JQVIl1egtjgDkA+Ncthgzg2AAlnD1ciqLXV4id441D91sX8GaYm2FW4KTFDqlKCkdWB8cKPfddi1yUr+vHH8Pv7RAoh8xScanVJbXEmt9nrqi8EGARt4htQsWOCfPYGIcDhNdeYrlV4MAUArxwrkUV37uM5quzLjzcg0GMymVa7gcc3NkGM14KxfEAJUq5y7y7f1+qgI0+fLi1IYaYSlAE+n3dIEkoBvW1xBWV7PsRbFBQik1V7d6gzHIfQmFOSwtJuzDAIDZYXL8cTxK3p94KGaDkc/rtRiMbGZmBNtBVRHGUifaczZ+gNZbZPfmXEbBJ2PgJ64/9ZDQws7zQxoi4CGNQozxJv//LoD/A8DnANyRsID/L3Wi2mCWj7NIzWcffPDAEaB3mLUDk4owNRYTsb0pgXUezjmVKHchwHW7KN+7Dr+4yPH/FqHO1y4jDmY1bSVt4KUUV2okRAFI6LsuBITVZaUPx/EI1d0PUW/voLh4nt3aOqHizGHwfRN6SM5dsJJIfIn6INGupbempCWL9dUJ4osl62TDyVJzrt1G+MF75lokSqIZGs65Sy2EjUfrvb3JylSO74Xh6dstlB/coi5Nh4cIy0uJ3hsTyg4fSECV3XbBHPzqcsbWU66/c5RS3dvL6lOqzXu6Q8bRCMMfe5LmvEMCuuBye1FSknMRc9FTmbasJblOoxzdFa2UxmbDJpkcuU840kyQDJIyEaWXJp9LwG1hG0q4EIdDWhutVoaJyOFaBdcEJSOp1+b7Vm+LvVul/osHZnghD3s8TCv6PgAfY9zjn38XwK8C+AKArRjjrznnfgXAUozxbznnfh7AfwjgKwBeBvB3Y4yfe9A1TlRe4nwsgFQ2Lbn/BgFHXMkJYRQrxCIWf5o4LIw7PM3NtWMyLa/OxkcFUeRo8BWaRyZmOu1ogJBhbTXV6fugSkiagwcy9SDJcU8dC+s227Ct8dyqPiUqTYY7YV8AOxZT3dsprvOJIix2vqZxJGQ8+fwi6mJbtWWbR/O+HnQ8ROrV3oeGOLbs3cy3Fbxp3of8TbqGZWQp+/y2LaBZvxkPojbZlzpqiwKZ+0+Sp7AO4B87574F4KsA/q8Y4/8L4NcA/Ixz7g0AX+R/A8D/DeBtAG8C+B8B/PWPvIJxMZVPsLSoAGAUKXGAXL12i1zBkHL0ytqzOexuJ3XNsYuxQWBRZF1c5OZLDZIzC4MB74JjdVldu80pp5bWDbiiheHP/UiaxBBUp8F3uymPDQaJjC7hBJhquA2+10O9c58kvMWt3ye5tlgT69P3eoSYs6GM5ZgND4dR62smbRt1V4lliTA/YPn2NmdQFlOFIXM7qClKqlOp9/eVyaffNyClBVElvef7fdrVH39MOQqS4VAegOGa2EKosLxEO73k5qVadHOLGs7oHLIRXVggQFn4BteupOsZKXlNUxpwNRn0nEmYufR1kqBXDYh2O1tDcczdxcxYSNij5c/LS2RIuIGs73aUi9M0msp5EY92bi7xVjj74nsJ7/o4dQ/AQ3gKfx7HwC3FV1pfRla4IfJkHHfaKsDssIwuAQinKdZMO+wuwpTjZjPUpucRzp3WFuPZeawnYM4r+njaUOWkndGeK05hz5m/aUpMgDa785vnUUaktKMvCkrFmr6GGZjlPFGEGU+ZqpfYbHIz7RHsjtkADE963qnexUft7OZznlPMxdkzExqME+cT72IwADod1ZI46fpT5+4kb8js1E3PrDku2S7f7NXZ9LAE+LQMSju+U3Q55RAdyE+XxDvHdJLDRqypOcniIkjyq0FhNVY9LC2QlgAfVhwDQALNnnkCosSrXIUQoAVJdQWYluVa6VeWymKM4xHKt9/VIqriwjn1VMJsPwFIvHsVF0he3HUMch/y3c93uwlYFEzEPGdYXKSFxl6DCn4w0Cbaiol9yM/DfHh1pflZ6v19asBi5OzFMwCQ1KHM4efmyMOYEgJl5cz8bzmXSubXRlS2WVxkvCB5ZgEhRSvBzrefm0tgqL6IXsOZ8sZNxFefm2j04opCC9XkWtXuLuqd+waLcnmhk64FJhDZegU23Pp58QB6vSQJaMFgn1OhfbdLrMZOB7YMvDmeMn7FmdMoNta1wjQ9WyLzCegroGlYX1NZQR3fhzgeGU/hZfeFbCeSFuN6SMxu03dSYBOImqpNSIVR1tg9bWxMn2ukL5vxmVzaxnBs7VVrb0p6MKyvUdzf8CDCk1dR/fDtlOaUHcOIxdrnRczVrC1AWXO5tPwclhaUVVdsrKPa3DIl4Fzi20jpCerebNenbvuLTyJ+/TX6OYQ8bm6MG0LIW6pbLr/z1Bhl577WFWgmZEpNSxaD833azygT0dZ/NLAdIZRJ85qpx0npQcEKTGfvE9N+YuRM/UMYDKgIy+BOVjXbNvKx61juWdahn5tT9e9EaDMivyblqKlwebROB5FDCXmeTzol+ed38MMLi1HTOAziNSvRaGHwhHE7NzksBx8gI0DU0VQkpDp3MDuBSR/BpboF309Vj/XRcRaraUsvAPXWvXRts+iq778Bksw6Zq46KwO129kOamsZHMeWAL8EIUBIKrEsqcqvrghkY2ykvLuZGp8yYcbPD7JybYmzM4MgzxKI+BW/9l31PpRuK3UohkZuVZbFGxPAVjQvRC9QEHjl6nO8buchXw8Ng8A4iVSQ5ik7p/cqKl3V9nZGLpoQatGHdomUJuNbNbxTc39+dlaxGOFRyN+r3V2i0PN1RSCWGKHHqrAkaw3OK2NRMQTnKPPDxCT1fhvgpZ+by/ur8n/1aJxJEH7iKck/j6O4cC4VxDCOAAB+MEe9A2w8KnllAX140MLGejIYBgByzz+F+GPPk2s126fCJn6B4niE0ZdeoqIoQW8Lqifw/T7pOAqHXyYIgFBo9YUdUeFVWF2lWgvT8RcgA3XjP/tRej7vgBeepL976sqsuEWnAzfT07/F46HZcSjVKrUexeWLVHAj35sfwPd73BOTwSs2nvd+7vG0UOtKad2SXkVd4e5/8DK55/Nz1BBFlJ1N1sYuPDEINsVHu+EMG1ECOLWlPYDw+GMozp3F6OUnVORWtQJiNO3ZfWYohDI8+gvPoPz8i8m1FzZgt5vR5IWSPfzKZ1GcOUUhpnPwl8/rHAuoV5za0HoRq2dR/eRzPHdSR9DS+xGAV42ghK2S5pX6iXYLRz/zHH+O1mZYWUZYWTa8hzqtPU57yrNLM5esQzaDvohRS9Yticu12kR55iMLpR7ieHTCh/AlFU5RUEpcaOlpaAA0ALlbdxJ4A975FuYnezY2dibbUGSCTTYF+PLdbtaAJKyvkc6/fL5xjyLnVu3cz5/NtLu39ybYRLMUuXne5n1MdYubKdkppb5hMGAuRpnCiCYjUNzqBs
tP3GLJlU87v/D8s6IiiadPuN8mMGv7Vkx7Ng1XZvup0YxhFNqwVNeV83CtAk6wLQ4NMtCasyuqkNw4MoYsZ35cr0e9Nl96Cvj26zk4aMM1R01+qt1d6lR2cARtciQgolGjlrApJ+yZtWEZpDJu3n26uk4DSGCJtB0XRhjvpPrQxjrnL4nPMwXeAEVlyUrR7fya8v0YVVuPvpxy/sXpU8m9lyyFbadmXkRt/GFccftCVdvbJPGFBHZpA9fmEWNyTRl0y1xYc94mOJipGsvzvPJMCpOmKEPDOcVmZEFZTUO9B5mnusquKeFCfXiYMB3rbjPgZYFPO0fZ/QJ6bjU+4v47l3ZBm3ni62mKcndXwT2/MJ91kNZxM+sqjkZUV2EIRM3nqHYbc2VA6ybduj4+ptClruC+93Yq3zfgaLpvj2pvT5vOUNarZeosanovxqmnic0MaUm5pHOtuhV7J59KOTadYB60+uAw3xEEGQZ0UQoFWAp/MoaauPYNTMFWO4rsFYC0INjdllbf5c0PqFqOZdpcCJS25INKjefS97i3oz5T47mkqrE+PKSQJduFc/RYzqHP29BNoO95/R4AlR7XMIo/47/6mio723Sk3pvztIjVUNZZSk0NAV8rDAZ5v0z+Dg1muq4ckjol/glfo9m/YFp5r9kM5Dz6AsY61zcAEM6corFifUORsqt2dlCcOZ28BNY5UHUoASnNxuFaRZbt8b1u0jHgo9q6l3ARYVQKACmhhOEv6NwaD4tCBq/NYIS/oDiOeAli1MQw67DLO2OYp4KLmIrThz0eCaPg/HS2X3ZEUvqVw3e72kqOhFQ9wA1hXdHK6J3FmdM6QVIGHZaXSOBUhDMkzuQX2WYVqL06uY1ixe2EW/c/jkbaeSlITC9WW3URCSAqr9/IDUCkCjkp0dUKOUA9IxFQkWeTBSVGMoVOxtjUFfzyEmJVpwUqhT98b2F1OaWyuOJTD1nAkn6rSU+ivreT5lBAUyN+Iy+lgsOxRr27TwZlMDDy9eOEVcg9yzw342H5G3syzbaA5XvXdZ5pDtJLXd35MJsnOE//19qTmL1s9fEweROc3o3S81L0OMSgMNIvcvA2S0YpbE/ns816+b7icJg1PrIhi8yl6JHq5mjXVCNdr3oSziUcaRqIe8LxSBiFWOcoMDyh3xoXTXN7GRxTEHA0RrW3h2pnh9KOrQL+EpVglDdJEVj4BfVorHz04tSGxsJCWVWrLLvT3h58t0MvuQ9Uqmy6M8mO67tdnZDw1OOpLRuHHXE8UtaZhA31cJheNhqM5DqXYzJyPPGiDykaDmJEfH8mLdqyVAEP+Uxx7izi/ZTeFQRfX0TnSA6e0e/K6ClovT9nKmQMASCcPaWL3vd7pJRlXFwA8P1edi0VqDH09Ym6DRsaNfEWgysp96ElMvnG3WejXd6+k77KL3Nx8bwqQAHkEUzN49cpxWrnR0MhTrmKMEoaYJ9CmZrwlbC6TJiGtCGQhj2sAJ5rNST8SutAYgRqao8YjQGNVUUep5lvcOdt3+sxANogRn3E8UgYBXsIRTRWdVa7Ln9LaP9Y4zl6cYr0YktI8PZ7+jNiVHqzxKSxqqjCUhFnlncPPhtEC3QVZ07RyzHbTyAPwGrPXmNmt3tg+kEYaz4eJ2CIF0BtWs0JhkDeC7n0cTwixWKAd6GxYih+fZWyF68+q2Sh8uYtGpezVLlYXr9BXoJIrjHH3nop0vJewwRJwZal9j9wRUGZEe5bUb7zHomoHA9R3d9lEZk8bne93uSC9IE++8qztIBPincjsQ4nCD0mLSfzqDiTzHWT7MaeoJ8foHz3fRQXzimC75wj8Rihay8mVSjf7SIsLtLLzAY/K5ePEdXN2yn8s2EbGLw9PCS8yTlE4S9ISt3wMYS2XZzaILo6Z2UUoD0+1rZy9lpi8JNK01hFhmU8ppKiTjgeneyD+4L+24IogqxLd+XUrYdjQCNq6TodSi3ZmN+Aj5ZUEpaXUN3bpji73QY6HdTb27kiEcgQWbKOnC9D8AUhLsdKt50m8kn3YzpNTyzcRB5S8EhEZ83xINrtBPEHmCha0ntsft90+rYkrmlCppZIVpzaQHn7ziS5TDw9SbfJWDaySmF+wHJnnbyYiefUkqImxtUH8lCGQ23fPm0M0nog3kQsSwphDPiZFZLJvcoaNNobGag7pVhJlZlnZsjrknE7gSwlazFtFLV6rbGqKDOxtzfZcXtKYZtdH/J7ybh86oRbLXhjNQcssp4Vu8j/venQVBSo9/YyIkkwvQmoroHJUVv3FKeIoxHq3aSwowg0x3tpoowCsSn1TbhGwiKsfh59wVOGQ3LwPt9RdXcz5wMoZJK2bDo+43KiUEZCnjgeUb6ef+eKFkm/S6l0TL00EUJG/hGGpBiEcOUis+yGSrLxc3ONrtuL5G2FANfrJVxGFn+sU1w8zvEON0NeRDwe5iIudmer6Lr67M2Xva44q3OUv1Q8f5mnyA2F5Pmq3V24S4kfY2nMtlWAGARtzmLCk2a5M4RgVkfFvBR4tPiQEJuKgmpjpEhOQgzv4JeX2GDukraDNndJgKPOvfm9GATh2NRT0sMPOh4Jo+DEbRXk3bRTk0M432FlWTUKAWjbdyAZDZmo4uJ5lDduJl2GmIuIwLk0iaKazAOr5Kh+H86z8rKRGpNrSSgirp4XxWfZrXxQfnttOibFF5+cyEwAQBjMJoFXpq5aaXNRkZZYcsIt9CFxOth7kapGQc8TxlFmoJZrt1XYw3nSo4hVlVKAdZU1xvX9voYMYWVZSViCcYTVVd65izRW/FIVG+uoPtwiQ1bVEykz20NT6cB8j1ljF37mBHQ2pNN7XQ2rbE8MncfX30mfr5MStet0iDRn0pKq7mzLtmE2CE5li26F3RhSatkl3Cim544GE6m27pEHs3WPeBE+aOs5ewjobEvmw2CQmgnv7UFEYT/O8UgYhWhcUoSEtqpFFPBxZobafIn2oVH5AZDJhoeVZdR3NxWAk/NQR+mRmUivUuhZHtlzn0LpB8gTpQcDRdpo1ChKJ6ESr0CTgnrycr/+fuqraBZ4dX+XFkJkoc6YVIHCwrzm3xFrwgqkWMfiJSb9GSRdWkd9lsyQyK4lHoQBNBOYlYRk1XMClHPhQkDJY+1nZthbaynQKgCxdjyKEfEw9bIgcLLhFgP6Elm1aM322BekpoY5YTBINGxeM9XeHoGaPmRS/xomsUdWXLqQYSxxNCJuiym2A4u8Op+yAgDIkHa7zHg0dHH2DF2rrcS1LMsi3kyrnb+4NmUrGIX1Rvh9kdBHa37Kktb3xyyVbh6PhFGA8aSbffoAIKysIMwPaNHyZLtOJ2uS4oqCBkd24uGIwCN2dUUQVQxIWFnOgB5hs+l9cAFU4HZo/KF0o3WlYBBipPy43L9174xhcy88TbiHd4jHFANbejNA8b/0oQDoxRDDI0xIoTGLNoJQdgGkWgbeWRU7YDfcdTqpcWrNasYsfe4bxkK1Kuwi8yF9n3c89Q6A1ABXlIqN5oF4Db5LOIkNoTLBV5alk3vWTuTT6iPYu/S9Xip7b6eUq
IjzuhD+//a+LMau5Dzvq6pzb+87yeawuWtGnMWaRYs9EzmBZVnyFhgGYgQ2EsQxDPghfrCBAEmMPOQtcF7iGEhgOLFhJEDgLMpiww5sJ7IEJE6iaKRRZGkWDWfhkBzu7Gazl9v3nlOVh3+pv869PeyxqLA57gIIdve999yzVP31L9//fUTjzzkj6VLUfpG3LlBym/EpslgVXJZS1hIVPAughkRZuSyQiNm73PgY0f21iWI0BBlwRzAZ6GrlGOMnkkLIiw2Sz0dvlxhbDkf0d5e5H0Yxc+029odRSBz/BmpmCUuLOiH95CSaGzfQrK1lSyxlnvl5AEDc3kY4uqw1ZVdVqnmoE4Qps+LmJsLSIuqr12hRSRzKnoHoRMoia+7ezR1x8sAVRFXDMa02TB5kaPJKDfylb1K4ExN9ny2vCX5gnLL5ziwu+oHjcFObj7duMw+iV9GZ6pGjhZZDkiSdIONiQrO+obudNBelnZ3CqIrHQHVxo7IsRoCZmHVi9/sEExfZeCft8HVpbHd2FMhkY/iwtEBZ92efpONKuVAYpRzrZRx7pCwBe5eh2YkYmNQzA7Qq4jqVSt5Jw5R8VvoN0qCP1R97qsjPaE4CUF1Ka3QRDFQb0M1M9EHcON0TWxq1eA8xaOL9uk6F+vK78BwmNjdukGGw3oVUU2zOqhWOCFo0bm+jjbu519gX1Yc5v5S+G98PADnTbbOqJrMO7wqtAwAFBrxg+LXZ2VbLaTi2jPrCRWqtvbOu4B9NDrUyujJkVxb8uWSmhQ5MduV2pr09hIbNMkjb+jyAfM1CQS/XZqourtMlAxg80sZmDg94QjbrGzkh1cqUlydU9g8QQMtRua7XG2oH9jMzwGBAry0tAk1DrdHSy2GrNfKd9nnIsNesoVtAWJhTLop2q7a9h5pgbF2HxvJCHyc5IPmMmQ/22bqqgj+0REQttiIEDPVRDM2NThd+aqK45/bYQwQtUt2RFmlZuO3KzahjIM8hW42R/JlUrHTNDOqHq3VacgoACint6uRxfQ8BRgZKdqlZaJ8lz1wIOoHpwDnzXZbTfGEQwgKRnGiuwsjAFVBl5+Cli4+PLYzOIqAiZVHbx255ECwNmyQM6UVfuoYSN0puIiWK7TWTnydCs7pK/AlNpKz31hbc9BTnJmKRP2nrMqjb2RKlkQSVhi6ctJLPxLt3qZNyfByNJMQAdbclNCgMgpV6T6kEDAnJCJ9rc7vFgTBiERbakrJz82IS/QhXVZpLkO7XYj6YIR5EfekywlPnKOQUA+Mce4bdXGKV58qs1vqs+HwUgu3LUqJ2W0pVbWMjc4NanAN7HZpYN2K2EhoWXKHOAd1OBvkxYM45V1a77jH2hVFw3mu8Swg9upkCWZWHV8S8xpDo69NTqE4cz3V94/JrGMEJvOrMKa3dW6VpSxArPfvOZ0ae5uYtSK1fhWoE+eYYvMSMupLkS3VNLcFNFh+REKQ6c8rsqp3CgCDlVuKwsEBJr1i6ry54BdtYUExz81Y2IHUN//TjcN1OUe/XBB73OagRZtfU9kZ4oXMHsrGNDRIbT6F4l8SosgobJuJq+YiSicgi0xGbLApjy3hAkQDUewPkMms7GZeiVlJSpBBKZNU0yWrccD81lb9DkqivvYE03s1eKLv7RVlcQjxDFNOsrVE4kmIO8z75dMEdIWGJMG6JChZ9zhgqDs9kswpLC4WXNSQ9z3NZS5Bc3oz9gZ7rXsZetSTnnXOfc8696px7xTn3wv3UkkwxUuwoNOvbvUyuIjuraYRRhpuCcou0J+tLlylZc+oE9zy4/BmZ7J0umktXhpl7ABJD1XDDqXOh3QAAIABJREFUZHn7AyJMnZpSPIKiD5nownn6nJaW1rM0mlQU4IO65PBhiO8xo9KYzIXp3tPODsXtbJzC4x/SnEmzujpE0lL0XTgHd+12ZnDirHgb91FfuJhLwSly7iOQAZUwQROQUfsX0qCPuLqmhkSrJtIbINfW6+kiBZDjc06Iibeh6l+yu7YmtMgEUjJ4eMcXOXebW5GF1c7yh/l5orIXaXkD3mrOv4XeZ58pznEo0QmYe5YykzJvQn58HP5Pvp7LuBMT2YjzdzbXb+i8kOvPTXZZXr6+dr3gdhBYupZRu91cmuWqlsjavZ+y5F49hV8F8AcppccBPAPgFdxnLUk3Jr0FTi2yYsSBomwnBkMJSfmhEHiGauH1hYtIm5sZuCOfnaBdLJw4Bm9x95zUrC9dhpb2QIZHYMr1xUuUQKqN2KcnfL6fni7iz9BSUtbzj03R5ZdvgMshUSszDVBYlQa17gLNK68TwvPu3Qxkkl1ua4sauIyrrrTwwOgJIlUbw00pXlD91gXuvuwXO6BFWpIupynHCTu2eDRVRYtednTx4pJpApKd1Oy8urhlMbbASRrmmaGJTMuTOOJ9rtPNVG0GWGSN1djvfxn1pz4K5YG0WX9x0yUsMoYq9fu5QS1FEsFhT7JZXSXpvK0t8jjZeMmzTYOaxIMZhyBYGc1lFc9poBUVJYtNaSjv8X6IVu5pFJxzcwD+EoDfpHub+imlNdxPLUl+4M3NW8WEleYaP0XaicpjsLWlqkzOO200ilwpqE5RI1RzZ51KhfyQq+MrhOLrdlC/+TZSSvBHuf3XB83UWt7CZmOzjNtQJnzUk5GY22TeARTs1LZt27IR5RgyZ5iL2N+HInSqzp7W7yx4BIGSrg45Hq0MshOA0sz7qamhCSOS9faYKRHLj5CfasjGpLZyz8S7gHAl8AIuCGv4mMHce+udubEx7dT0MzNIKVFpUe6j3AsxnJL8tGGAaeO2orL2Gv1cuwPTF2GIf+oc3e8//gr6n3ludNOUzRewkRKMR9zO/Az1latlXoTng3Re2rb46uQKXfOJFVIK385t/VahzD4HAXnJGnGdSsMT1+mOJvLZZezFUzgD4AaA33LOveSc+w0WhVlOKV3h91wF6UMAwAqAi+bzl/hvxXDO/Zxz7kXn3IuD1Csl2G1dFlCSEwHLAGA8fIa8xo0NjWvrCxfJHet2EW9mvsTm6nU0q6vwjyxzTJxI5Rcgd3hpEWF2mngPAY0jh2rDzKkHRypKBRgoUuXBjY2VO1MyqsEpoTn/tjmeqbJwiFPy7lGDmEyq+q0LJSuTp444AfEA2XuSxG199Zo2kMkOrJ1+kgyUZO/WVg53gIyKi00R7oTlI+o5aZNQh6C6qd8vuRs62bVVr8SqiQNKmeacU/Rn3NhAvHuXMvqcENX72E4+ppTjafHOzDyyXIpxcxPxTu4HKWDIbKDiN17Vz3f/8EVsf+qpoXkJiIHtFK+NOj9FYjpCR+ZzjawERc+/uXwVqd9H/daFwmu1OQQLbbawfsnlpH7fMHztXYYe2JtRqAB8FMCvpZSeA7CJHCrQiXy7WpJuTAVT+MXCTWukUUngsowDV/FPfojSIi07ZRrUygOomdixMTQXL+fMrwFApZ0+4sZmlhprw5Cd08QfISA7pbsLqOYjmiYnqHi4TtdY+MwlWNSgU9IEp36uzekvSVk5VmwUyiwuuS3RyrUUrdx6rLKbTiZse0gl
Rv73k5NAKwzxU1MFWajKooENi8i98/mpAjQoDyCVAUVQJupXUM8nNjmBOWpE03jE90U5E8DdtOyuF41VktQsqj8xV2V4jP3+l9V7AJDhzJxXsZUQ6xUCxLAdNzbgpyZ1jkiPimw+no2oMI7bfBp8UIAUQIZbvCnLLC0yg3ZOpZjub/gA2ukvpZS+xL9/DmQk7p+WZGIrblmN2w/e5fbWtL0NBWpwmU35DZIBLHFW3TaEpH4ffn4ui82aXT5ubEBl32xpCNBdX2jKFSIrC8mUGuFDhiPDuHq2LJSSGip7jYXXwH9Tb4DJOhSKbCDH4tHYvgQ4X2Si5b0Eya1KjQNzXrKwJeQIy0dUiTkN+pqXadbuFIQfSukuw5daB4oNsL/L15rwQvsHpE5vy47ivZkh+gvFtSKHdspMdPcuMXqxwbY9NPZ8XKc72jCOjSF+41X4Z58sqzPtkaJStau3tUX5iLixoQlGOUdpD08pk+ikQb9k4DYldTnnphUSKHDJskdV2XPb67inUUgpXQVw0TknJvLTAF4G8LsAfpr/9tMAfod//l0Af4OrEM8DuGPCjNHDoexwBMidl7jbJMwU7COTg0t+woMQlo8Mu64M5ZUYq7l1m1iS61rdVIHjhoW5coeSuJXBQ/KzhjJ8bsUOJg1dgtc3gqJWG2AkkEcEUHiEpUWaKN1Ojgt5B+v/4MeLBJld4Im1IOQ1SVZJ15+6/GZRyGQSYpf6+k26X9dvaMkU4MUv4RNXGlSB2nohLZi3lhqlF8DE+2SMM6DHVRWqI4fKXAmDxYrFyInByDuvksBIwtCI/NpyJ5AxMQolF32MQZ9YvGan87nz83OdLuLXXoZ7Ms9NgUoDKJCeEq6EpcVMIptyMlByYM36us4NafwDkLkXYMMFj+rYUZ0voyD47RyYJkD3OPaEaHTOPQvgNwB0QTqRPwMyKP8OwEkAFwD81ZTSbUd1lX8K4IcAbAH4mZTSi+91/EIMxuLDeRRszvrHUC4IGfZvuyATJStsWYtt73rqs3fRjvl5hKVFCjEUxGKEVzWL7kd/v3VRR6D0BMGnXZay0NvPadT1C9X9/NwQqs4iI4eOaX7WHXfUvBh1LanFevwe471Eb7R/Q4z+iHsj7ycdhXr4/tr7bu6RegxDQCojfmP/H3W/R9yD5lMfRfjiS4XHA4AMuJXzG/X59ms+6Of81BRXc0wSc5dzcp3uMOfGiGMDuL+IxpTS1zj+fzql9OMppdWU0q2U0qdTSo+llH4gpXSb35tSSj+fUvpQSukj9zIIxffUpaiGJGWKzKndjW3vfmSpcOtO0wnR/8Zqxs1NaogCtKnIz1F81qyv6+7XHmFpkfIZm1vF8SWvEbe2EObnSHT2yKHiswVJqhzbwmfFI5FdwsKR28YQQDh3Vq9HQVQyWQW2bVz7cGixFK+pOplg1uY9ul1lPm53cBZlPQmNqip385mhlQBz39sNV3aiS/bcUse328I1Q2/LbdZYLy6UCTjBqNS1eiI2oeg6lRpLeuMwLWB72P6Q8IWvkhS99D+oMSr7IMLsrEmi++FFy16UMHDF7R7RrlnCmpSG7oeEn4UX4A10ur0W9jj2BaJRh5R3jBsc5ucRFhaUXRkp0u9jY0NehYQAfnIycxJYVJ7hQmyuXUc4vKT1+7S5pezQdDBme+4YncV1yoRr7GqrJHz8uLFJ8ODNLco9SFtrTHkRAsUDloSVPWZBY26/h93d5pXXIYhBy4Ak50L/5YVQX7laKk+dO4u03dOQyrNATrNOrduilaE7rQ9kFEX3kr8n1bXiNKQ8CVCIoaEFewHN+no2FBI6OOJdKIymVGAM85FClG0exlarIMQ5UfEtUiGyhsCWDzNkPVeVbKgq3aA2nJPuWVHIbq7fQPrY43nOcjLU3utU1wgLcwyfj/oM8/P2RXu4APncJz7CYLsREns0qeheM8+nhFhhfk7by4XK7f2MfdEQNesW0/PdH1Z3WWreVqW33WA0RNkmklvGjS+alZhpyQ4p/Yn4jKXdshqObdetLUjiOl1qjd3ehl9YQHPzJoT2S9h+2mIhlkau/R1tF152Y2JcogkSDi2hvvzuMD2ZOZ6fns4KUzaBuVtI0joX6TaNG5ukVt2udUvYNYoibJcxFEKM2DWt20yLJeaWcHPubdq4ofOX0u4uxKW73rvWcd5z8Hv6P/hxdP/rS0VC13Wo7yIcOVyAx4rjSigDqDcgyF5JhGsTnNwXExa5bne4Kax9H/g+PVx0bK5MjhRwZilh2fqzcwWrkb5fFh+Dm7SUcyeLfohWgR8fp/d3O1quEfp3/R7J1kvpyLj4lsUnDfp5wQiirEV0oZMaUENRDBvmdDrFpEkxqUhLWJhHGtTZw+Eya+G+S2KMKewBKPinpCdzIz9nd7hm7Q7jGUzOwAedmKqv0RrKD2Dc8PbuLxO9TXMP5zW7r/kde4/k/EQW0JQNNcHMHoXvdoa0Ots1/aKEyK9LU5z8XoS1fC3iDbhOF2Of/xqqU9w7EnmjGRBjtDyrwhsCMiiPGb4BZAQni87K5ljMKfF6GBVZwP19GGIaG5Wne6+xP4xCQn4IKQ1nmHmIPgNpBiatEdvRVurx3U7BtNRcv0GsTOZ9AnW1Bkb726sKqWcaTDgX0WYgluSlAEbC7HSx+AtEocT7PLTxR0hNGDsh7qBVUqqvXqMQ6uiyZtr9xHhZQkspk8ucPU0Z8+1ezsjDZMnbJUk9qZzXKajsbRhTVahOHScDLQaGE3UKHzY72EjDP+gPPTMKAarMxWAMdPt9elzJtcjzES+u21XiVy3DaukxKzBZqjyAFqf0egDg6lbmtLS9Iwo5XlvH4LMfJ2gyhwNKBGsAadKx2e5zkXsj16yvG6NoyVKssdNnFEIBBIy9nd1Lp7uM/WEUgCJ7Tbuq11hQs8d9Ci9kV3ZVhnLahFZ44jFy2xYWgE5HWXvpINRBWJ05RQ9rZgZhfo4ESrRbzTA4NZlrz3W62iLdhu1qH4agJ9fu6C7uJydRX71GOHfRqbDYCWl+YmMgLqIlFwFQAFDqS5cpXOEQQfkX7S4IEJy7aVQ5aSgZZl1Zrly4qiJiD3FTg1evQMrCcq2CrgxLi0Phie7gthpgS5Rtl5cNi9CMwQdUJ47n5q4Rn5drFtKY+vK75XPZ7lEilPNVothNL5agMaSkm1NqGm0mQkpECV/XmU6e54nMv1TXaFZXMf7lNzD47seLagAlDQdZ1Zrfn3Z2aCcXmjhXAseGFjMns22JNNWsPC4Gknsh8g0wKlF7HPvDKPDNsIk43d2l3msSLrorhUCGgkMMUX6Kb76jxiNtbyOdPpY/I8CPS1fYQNyknaPbQcOQZ9ftaAY+zM+T5xJo14/9gcqQu6pDD1pcVmfAQ0Y0RfIbzbOP5Z3a0mPxeckiogUWcyKJodSRm5/C/Lx+l2ggAlCNBQtKEsNav/m2Gpfq1An1hGQXls695g5x/DU3bmi1xcKKRYELPsONU2RDe3Q5t1+nVIRYcCTQGhb5886jOrqsO7ufmkJ1dDmHaux1kPHraWemYv857FBPwooDGe/
... [several thousand characters of base64-encoded PNG data omitted: matplotlib heatmap output of plt.imshow(wqk[head]*wqk[head]) for the cell whose source follows] ...\n", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "head_size = config.hidden_size // config.num_attention_heads\n", + "layer = 1\n", + "head = 1 # 2, 3, 10\n", + "wq = model.bert.encoder.layer[layer].attention.self.query.weight.data.view(-1, config.num_attention_heads, head_size).permute(1, 0, 2)\n", + "wk = model.bert.encoder.layer[layer].attention.self.key.weight.data.view(-1, config.num_attention_heads, head_size).permute(1, 0, 2)\n", + "\n", + "wqk = torch.bmm(wq, wk.transpose(-1, -2))\n", + "# (wqk * wqk.transpose(-1, -2)).sum((1, 2)) / (wqk * wqk).sum((1, 2))\n", + "plt.imshow(wqk[head]*wqk[head])\n", + "plt.show()\n", + "\n", + "# q = torch.matmul(pos_emb, wq)\n", + "# k = torch.matmul(pos_emb_prev, wk)\n", + "# (q * k).sum((-2, -1))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "pos_emb = model.bert.embeddings.position_embeddings.weight.data\n", + "pos_emb_prev = torch.zeros_like(pos_emb)\n", + "pos_emb_next = torch.zeros_like(pos_emb)\n", + "pos_emb_prev[1:] = pos_emb[:-1]\n", + "pos_emb_next[:-1] = pos_emb[1:]\n", + "pos_emb, pos_emb_prev, pos_emb_next = pos_emb[1:-1], pos_emb_prev[1:-1], pos_emb_next[1:-1]\n", + "\n", + "# pos_q = torch.matmul(pos_emb, wk[head])\n", + "# plt.imshow(pos_q[:32])\n", + "# plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 146, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}" + ] + }, + "execution_count": 146, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "config" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.',\n", + " 'Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. _ left then _ left.\",\n", + " \"John came after Mary. 
They left in reverse order. _ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare sentences with same / opposite meaning, 2nd order\n", + " \"Though John is tall, Tom is taller than John. So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " # \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no, 2nd order\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality, 2nd order\n", + " \"John said/thought that the red ball was heavier than the blue ball. He was wrong. The _ ball was heavier\",\n", + " \"John was wrong in saying/thinking that the red ball was heavier than the blue ball. The _ ball was heavier\",\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong/right.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . \",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. _ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + " if a not in tokenizer.vocab:\n", + " ce\n", + " print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(\"The trophy doesn't fit into the brown suitcase because [it] is too large.\",\n", + " 'fit into:large/small'),\n", + " ('Joan made sure 
to thank Susan for all the help [she] had recieved.',\n", + " 'thank:receive/give'),\n", + " ('The delivery truck zoomed by the school bus because [it] was going so fast.',\n", + " 'zoom by:fast/slow'),\n", + " ('Frank felt vindicated when his longtime rival Bill revealed that [he] was the winner of the competition.',\n", + " 'vindicated/crushed:be the winner'),\n", + " ('The large ball crashed right through the table because [it] was made of steel.',\n", + " 'crash through:[hard]/[soft]'),\n", + " (\"John couldn't see the stage with Billy in front of him because [he] is so short.\",\n", + " '[block]:short/tall'),\n", + " ('Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.',\n", + " 'down to:top/bottom'),\n", + " ('Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.',\n", + " 'beat:good/bad'),\n", + " (\"Sam's drawing was hung just above Tina's and [it] did look much better with another one below it.\",\n", + " 'above/below'),\n", + " ('Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.',\n", + " 'better/worse:study hard'),\n", + " ('The firemen arrived after the police because [they] were coming from so far away.',\n", + " 'after/before:far away'),\n", + " (\"Frank was upset with Tom because the toaster [he] had bought from him didn't work.\",\n", + " 'be upset with:buy from not work/sell not work'),\n", + " ('The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first.',\n", + " 'above/below:moved first'),\n", + " ('Pete envies Martin although [he] is very successful.', 'although/because'),\n", + " ('I poured water from the bottle into the cup until [it] was empty.',\n", + " 'pour:empty/full'),\n", + " (\"Sid explained his theory to Mark but [he] couldn't convince him.\",\n", + " 'explain:convince/understand'),\n", + " (\"Susan knew that Ann's son had been in a car accident, so [she] told her about it.\",\n", + " '?know tell:so/because'),\n", + " (\"Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.\",\n", + " 'beat:younger/older'),\n", + " ('In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.',\n", + " 'but/and'),\n", + " ('Ann asked Mary what time the library closes, because [she] had forgotten.',\n", + " 'because/but'),\n", + " ('If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.',\n", + " 'fool:get/lose'),\n", + " ('Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.',\n", + " '?stop normal/stop abnormal:strange'),\n", + " (\"I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.\",\n", + " 'put ... into filled with ... :get in/get out'),\n", + " ('The dog chased the cat, which ran up a tree. [It] waited at the bottom.',\n", + " 'up:at the bottom/at the top'),\n", + " ('John was doing research in the library when he heard a man humming and whistling. [He] was very annoyed.',\n", + " 'hear ... humming and whistling:annoyed/annoying'),\n", + " ('John was jogging through the park when he saw a man juggling watermelons. [He] was very impressed.',\n", + " 'see ... juggling watermelons:impressed/impressive'),\n", + " ('Jane knocked on the door, and Susan answered it. 
[She] invited her to come out.',\n", + " 'visit:invite come out/invite come in'),\n", + " ('Jackson was greatly influenced by Arnold, though [he] lived two centuries later.',\n", + " 'influence:later/earlier'),\n", + " ('The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.',\n", + " 'change:hard/easy'),\n", + " ('Fred is the only man still alive who remembers my great-grandfather. [He] is a remarkable man.',\n", + " 'alive:is/was'),\n", + " (\"In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.\",\n", + " 'better equipped and large:defeated/victorious'),\n", + " ('When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority.',\n", + " 'be full of:minority/majority'),\n", + " ('Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .',\n", + " 'like over:more/fewer'),\n", + " ('We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .',\n", + " 'place on all:not enough/too many'),\n", + " (\"Steve follows Fred's example in everything. [He] admires him hugely.\",\n", + " 'follow:admire/influence'),\n", + " (\"The table won't fit through the doorway because [it] is too wide.\",\n", + " 'fit through:wide/narrow'),\n", + " ('Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.',\n", + " 'trade:dowdy/great'),\n", + " ('John hired Bill to take care of [him] .',\n", + " 'hire/hire oneself to:take care of'),\n", + " ('John promised Bill to leave, so an hour later [he] left.', 'promise/order'),\n", + " (\"Jane knocked on Susan's door but [she] did not get an answer.\",\n", + " 'knock:get an answer/answer'),\n", + " ('Joe paid the detective after [he] received the final report on the case.',\n", + " 'pay:receive/deliver'),\n", + " ('Bill passed the half-empty plate to John because [he] was full.',\n", + " 'pass the plate:full/hungry'),\n", + " ('George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.',\n", + " 'even though/because/not'),\n", + " (\"Jane gave Joan candy because [she] wasn't hungry.\",\n", + " 'give:not hungry/hungry'),\n", + " ('James asked Robert for a favor but [he] was refused.',\n", + " 'ask for a favor:refuse/be refused`'),\n", + " ('Kirilov ceded the presidency to Shatov because [he] was less popular.',\n", + " 'cede:less popular/more popular'),\n", + " ('Emma did not pass the ball to Janie although [she] saw that she was open.',\n", + " 'not pass although:see open/open')]" + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "# [(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in 
e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])\n", + "[(eg[0]['sentence'], eg[0]['relational_word']) for index, eg in groups.items() if '/' in eg[0]['relational_word']]" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in e['sentence'] for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " _plot_attn(ax1, 
attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}" + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "config.num" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/Untitled_zeoliao.ipynb b/Untitled_zeoliao.ipynb new file mode 100644 index 00000000000000..104e29a6c09ee9 --- /dev/null +++ b/Untitled_zeoliao.ipynb @@ -0,0 +1,1501 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 336, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import itertools\n", + "from itertools import product, chain\n", + "import numpy as np\n", + "\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "def reverse(l):\n", + " return list(reversed(l))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def mask(ent_str):\n", + " tokens = ent_str.strip().split()\n", + " if len(tokens) == 1:\n", + " return '[%s]' % tokens[0]\n", + " elif len(tokens) == 2:\n", + " assert tokens[0] == 'the', ent_str\n", + " return '%s [%s]' % (tokens[0], tokens[1])\n", + " else:\n", + " assert False, ent_str" + ] + }, + { + "cell_type": "code", + "execution_count": 276, + "metadata": {}, + "outputs": [], + "source": [ + "A_template = \"{rel_prefix} {dt} {ent0} {rel} {dt} {ent1} {rel_suffix}\"\n", + "B_template = [\"{pred_prefix} {dt} {ent} {pred}\", \"{pred_prefix} {pred} {dt} {ent}\"]\n", + "\n", + "# causal_templates = [[\"{A} because {B}.\"],# \"{B} so {A}.\"], \n", + "# [\"{A} so {B}.\"],# \"{B} 
because {A}.\"]\n", + "# ]\n", + "# turning_templates = [[\"{A} although {B}.\"],# \"{B} but {A}.\"], \n", + "# [\"{A} but {B}.\"],# \"{B} although {A}.\"]\n", + "# ]\n", + "\n", + "causal_templates = [[\"{A} {conj} {B}.\"],# \"{B} so {A}.\"], \n", + " [\"{A} {conj} {B}.\"],# \"{B} because {A}.\"]\n", + " ]\n", + "turning_templates = [[\"{A} {conj} {B}.\"],# \"{B} but {A}.\"], \n", + " [\"{A} {conj} {B}.\"],# \"{B} although {A}.\"]\n", + " ]" + ] + }, + { + "cell_type": "code", + "execution_count": 405, + "metadata": {}, + "outputs": [], + "source": [ + "def make_sentences(A_template, B_template, causal_templates, turning_templates,\n", + " index=-1, orig_sentence='', entities=[\"John\", \"Mary\"], entity_substitutes=None, determiner=\"\", \n", + " packed_relations=[\"rel/~rel\", \"rev_rel/~rev_rel\"], packed_relation_substitutes=None,\n", + " relation_prefix=\"\", relation_suffix=\"\",\n", + " packed_predicates=[\"pred0/~pred0\", \"pred1/~pred1\"], predicate_prefix=\"\", prepositive_pred=False,\n", + " predicate_dichotomy=True, reverse_causal=False, conjunctions=[[\"because\", \"so\"], [\"although\", \"but\"]]):\n", + " assert entities[0].lower() in tokenizer.vocab , entities[0]\n", + " assert entities[1].lower() in tokenizer.vocab , entities[1]\n", + " \n", + " def form_As(packed_rels):\n", + " relations, neg_relations = zip(*[rel.split(\"/\") for rel in packed_rels])\n", + " relations, neg_relations = list(relations), list(neg_relations)\n", + "\n", + " As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]]\n", + " negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + neg_relations[:1], reverse(entities) + reverse(neg_relations)[:1]]]\n", + " return As, negAs\n", + " \n", + " As, negAs = form_As(packed_relations)\n", + " \n", + " substituted_As, substituted_negAs = [], []\n", + " for packed_rel_subs in zip(*packed_relation_substitutes):\n", + " subs_As, subs_negAs = form_As(packed_rel_subs)\n", + " substituted_As += subs_As\n", + " substituted_negAs += subs_negAs\n", + " \n", + " if \"/\" in packed_predicates[0]:\n", + " predicates, neg_predicates = zip(*[pred.split(\"/\") for pred in packed_predicates])\n", + " predicates, neg_predicates = list(predicates), list(neg_predicates)\n", + " else:\n", + " predicates, neg_predicates = packed_predicates, []\n", + " \n", + " B_template = B_template[int(prepositive_pred)]\n", + " Bs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, predicates)]\n", + " negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, neg_predicates)]\n", + " if predicate_dichotomy:\n", + " Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, reversed(neg_predicates))]\n", + " negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, reversed(predicates))]\n", + "\n", + " def form_sentences(sentence_template, As, Bs, conj):\n", + " return [\" \".join(sentence_template.format(A=A, B=B, conj=conj).split()) for A, B in product(As, Bs)]\n", + "\n", + " def 
form_all_sentences(As, negAs, Bs, negBs):\n", + " causal_sentences = []\n", + " causal_conj = conjunctions[0][int(reverse_causal)]\n", + " for causal_template in causal_templates[int(reverse_causal)]:\n", + " for A, B in [(As, Bs), (negAs, negBs)]:\n", + " causal_sentences += form_sentences(causal_template, A, B, causal_conj)\n", + "\n", + " turning_sentences = []\n", + " turning_conj = conjunctions[1][int(reverse_causal)]\n", + " for turning_template in turning_templates[int(reverse_causal)]:\n", + " for A, B in [(As, negBs), (negAs, Bs)]:\n", + " turning_sentences += form_sentences(turning_template, A, B, turning_conj)\n", + "\n", + " sentences = causal_sentences + turning_sentences\n", + " return sentences, causal_sentences, turning_sentences\n", + " \n", + " sentences, causal_sentences, turning_sentences = form_all_sentences(As, negAs, Bs, negBs)\n", + "# substituted_sentences = sentences\n", + "\n", + " if packed_relation_substitutes is not None:\n", + " substituted_sentences = form_all_sentences(substituted_As, substituted_negAs, Bs, negBs)[0]\n", + " \n", + " substituted_sent_groups = list(zip(sentences, substituted_sentences))\n", + "\n", + " if entity_substitutes is not None:\n", + " for sub in entity_substitutes:\n", + " for ent in sub:\n", + " assert ent.lower() in tokenizer.vocab , ent + \" not in BERT vocab\"\n", + " assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes\n", + " assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6\n", + "\n", + " entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1]))\n", + " substituted_sent_groups = [[sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) \n", + " for sent in sent_group for sub in entity_substitutes] for sent_group in substituted_sent_groups]\n", + " return causal_sentences, turning_sentences, substituted_sent_groups\n", + "\n", + "# if entity_substitutes is not None:\n", + "# for sub in entity_substitutes:\n", + "# for ent in sub:\n", + "# assert ent.lower() in tokenizer.vocab , ent + \" not in BERT vocab\"\n", + "# assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes\n", + "# assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6 \n", + " \n", + "# entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1]))\n", + "# substituted_sentences = [sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) \n", + "# for sent in substituted_sentences for sub in entity_substitutes]\n", + "# return causal_sentences, turning_sentences, substituted_sentences" + ] + }, + { + "cell_type": "code", + "execution_count": 394, + "metadata": {}, + "outputs": [], + "source": [ + "def make_sentences(A_template, B_template, causal_templates, turning_templates,\n", + " index=-1, orig_sentence='', entities=[\"John\", \"Mary\"], entity_substitutes=None, determiner=\"\", \n", + " packed_relations=[\"rel/~rel\", \"rev_rel/~rev_rel\"], packed_relation_substitutes=None,\n", + " relation_prefix=\"\", relation_suffix=\"\",\n", + " packed_predicates=[\"pred0/~pred0\", \"pred1/~pred1\"], predicate_prefix=\"\", prepositive_pred=False,\n", + " predicate_dichotomy=True, reverse_causal=False, conjunctions=[[\"because\", \"so\"], [\"although\", \"but\"]]):\n", + " assert entities[0].lower() in tokenizer.vocab , entities[0]\n", + " assert entities[1].lower() in tokenizer.vocab , entities[1]\n", + " \n", + " 
relations, neg_relations = zip(*[rel.split(\"/\") for rel in packed_relations])\n", + " relations, neg_relations = list(relations), list(neg_relations)\n", + " \n", + " As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]]\n", + " negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) \n", + " for ent0, ent1, rel in [entities + neg_relations[:1], reverse(entities) + reverse(neg_relations)[:1]]]\n", + " \n", + " if \"/\" in packed_predicates[0]:\n", + " predicates, neg_predicates = zip(*[pred.split(\"/\") for pred in packed_predicates])\n", + " predicates, neg_predicates = list(predicates), list(neg_predicates)\n", + " else:\n", + " predicates, neg_predicates = packed_predicates, []\n", + " \n", + " B_template = B_template[int(prepositive_pred)]\n", + " Bs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, predicates)]\n", + " negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, neg_predicates)]\n", + " if predicate_dichotomy:\n", + " Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, reversed(neg_predicates))]\n", + " negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) \n", + " for ent, pred in zip(entities, reversed(predicates))]\n", + "\n", + " def form_sentences(sentence_template, As, Bs, conj):\n", + " return [\" \".join(sentence_template.format(A=A, B=B, conj=conj).split()) for A, B in product(As, Bs)]\n", + "\n", + " causal_sentences = []\n", + " causal_conj = conjunctions[0][int(reverse_causal)]\n", + " for causal_template in causal_templates[int(reverse_causal)]:\n", + " for A, B in [(As, Bs), (negAs, negBs)]:\n", + " causal_sentences.extend(form_sentences(causal_template, A, B, causal_conj))\n", + "\n", + " turning_sentences = []\n", + " turning_conj = conjunctions[1][int(reverse_causal)]\n", + " for turning_template in turning_templates[int(reverse_causal)]:\n", + " for A, B in [(As, negBs), (negAs, Bs)]:\n", + " turning_sentences.extend(form_sentences(turning_template, A, B, turning_conj))\n", + " \n", + " sentences = causal_sentences + turning_sentences\n", + " substituted_sentences = sentences\n", + "\n", + " if packed_relation_substitutes is not None:\n", + " packed_relation_substitutes = list(itertools.product(packed_relations[:1] + packed_relation_substitutes[0], \n", + " packed_relations[1:] + packed_relation_substitutes[1]))\n", + "\n", + " substituted_sentences = []\n", + " for packed_sub_relations in packed_relation_substitutes:\n", + " sub_relations, sub_neg_relations = zip(*[rel.split(\"/\") for rel in packed_sub_relations])\n", + " \n", + " # neg_relations should be replaced first (maximum matching), otherwise there will be wrong sentences\n", + " substituted_sentences += [sent.replace(neg_relations[0], sub_neg_relations[0])\n", + " .replace(neg_relations[1], sub_neg_relations[1])\n", + " .replace(relations[0], sub_relations[0])\n", + " .replace(relations[1], sub_relations[1]) \n", + " for sent in sentences]\n", + "# print(relations[0] + \" -> \" + sub_relations[0])\n", + "# print(relations[1] + \" -> \" + sub_relations[1])\n", + 
"# print(neg_relations[0] + \" -> \" + sub_neg_relations[0])\n", + "# print(neg_relations[1] + \" -> \" + sub_neg_relations[1])\n", + "# for sent, subs_sent in zip(sentences, substituted_sentences):\n", + "# print(sent + \" -> \" + subs_sent)\n", + " \n", + " substituted_sentences = list(set(substituted_sentences))\n", + " \n", + "# if entity_substitutes is not None:\n", + "# for sub in entity_substitutes:\n", + "# for ent in sub:\n", + "# assert ent.lower() in tokenizer.vocab , ent + \" not in BERT vocab\"\n", + "# assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes\n", + "# assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6 \n", + " \n", + "# entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1]))\n", + "# substituted_sentences = [sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) \n", + "# for sent in substituted_sentences for sub in entity_substitutes]\n", + " return causal_sentences, turning_sentences, substituted_sentences" + ] + }, + { + "cell_type": "code", + "execution_count": 443, + "metadata": {}, + "outputs": [], + "source": [ + "from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n", + "import torch\n", + "import random\n", + "import math" + ] + }, + { + "cell_type": "code", + "execution_count": 435, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([[8., 9., 0., 0.],\n", + " [6., 3., 5., 6.],\n", + " [2., 5., 0., 9.],\n", + " [2., 9., 1., 5.],\n", + " [3., 0., 8., 8.],\n", + " [6., 5., 8., 2.],\n", + " [3., 0., 6., 0.],\n", + " [4., 7., 1., 1.]])" + ] + }, + "execution_count": 435, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "A = torch.randint(10, size=(8, 2))\n", + "B = torch.randint(10, size=(8, 2))\n", + "d = torch.cat([A, B], dim=-1)\n", + "d" + ] + }, + { + "cell_type": "code", + "execution_count": 427, + "metadata": {}, + "outputs": [], + "source": [ + "sampler = RandomSampler(dataset)\n", + "dataloader = DataLoader(dataset, sampler=sampler, batch_size=3)" + ] + }, + { + "cell_type": "code", + "execution_count": 446, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]" + ] + }, + "execution_count": 446, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "batch_size = 4\n", + "idx_list = list(range(10))\n", + "# random.shuffle(idx_list)\n", + "n_batches = math.ceil(len(idx_list) / batch_size)\n", + "[idx_list[i * batch_size: (i + 1) * batch_size] for i in range(n_batches)]" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "def get_frame(frames, index):\n", + " for frame in frames:\n", + " if frame['index'] == index:\n", + " return frame\n", + " return None" + ] + }, + { + "cell_type": "code", + "execution_count": 389, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "30" + ] + }, + "execution_count": 389, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "frames = \\\n", + "[\n", + " {\n", + " \"index\": 2,\n", + " \"orig_sentence\": \"The trophy doesn't fit into the brown suitcase because [it] is too large/small.\",\n", + " \"entities\": [\"trophy\", \"suitcase\"],\n", + " \"entity_substitutes\": [[\"ball\", \"toy\"], [\"bag\", \"box\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"doesn't fit into/can fit 
into\", \"doesn't hold/can hold\"],\n", + " \"packed_relation_substitutes\": [[\"can't be put into/can be put into\"], [\"doesn't have enough room for/has enough room for\"]],\n", + " \"packed_predicates\": [\"is large/isn't large\", \"is small/isn't small\"],\n", + " },\n", + " {\n", + " \"index\": 4,\n", + " \"orig_sentence\": \"Joan made sure to thank Susan for all the help [she] had recieved/given.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Tiffany\"]],\n", + " \"packed_relations\": [\"thanked/didn't thank\", \"took good care of/didn't good care of\"],\n", + " \"packed_relation_substitutes\": [[\"felt grateful to/didn't feel grateful to\"], [\"was appreciated by/wasn't appreciated by\"]],\n", + " \"packed_predicates\": [\"had received a lot of help/hadn't received a lot of help\", \"had given a lot of help/hadn't given a lot of help\"],\n", + " \"predicate_dichotomy\": False,\n", + " },\n", + " {\n", + " \"index\": 4000,\n", + " \"orig_sentence\": \"John gave a lot of money to Susan because [he] was very rich/poor.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Linda\"]],\n", + " \"packed_relations\": [\"gave a lot of money to/didn't give a lot of money to\", \"received a lot of money from/didn't receive a lot of money from\"],\n", + " \"packed_relation_substitutes\": [[\"subsidized/didn't subsidize\"], [\"borrowed a lot of money from/didn't borrow any money from\"]],\n", + " \"packed_predicates\": [\"was rich/wasn't rich\", \"was poor/wasn't poor\"],\n", + " },\n", + " {\n", + " \"index\": 10,\n", + " \"orig_sentence\": \"The delivery truck zoomed by the school bus because [it] was going so fast/slow.\",\n", + " \"entities\": [\"truck\", \"bus\"],\n", + " \"entity_substitutes\": [[\"car\", \"ambulance\"], [\"bicycle\", \"tram\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"overtook/couldn't overtake\", \"fell far behind/didn't fall far behind\"],\n", + " \"packed_relation_substitutes\": [[\"zoomed by/didn't pass\"], [\"was left behind/wasn't left far behind\"]],\n", + " \"packed_predicates\": [\"was going fast/wasn't going fast\", \"was going slow/wasn't going slow\"],\n", + " },\n", + " ## didn't defeated, replace error: didn't defeat -> defeated\n", + " {\n", + " \"index\": 12,\n", + " \"orig_sentence\": \"Frank felt vindicated/crushed when his longtime rival Bill revealed that [he] was the winner of the competition.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Michael\"], [\"Mary\", \"Linda\"]],\n", + " \"packed_relations\": [\"beat/didn't beat\", \"lost to/didn't lose to\"],\n", + " \"packed_relation_substitutes\": [[\"defeated/didn't defeat\"], [\"was defeated by/wasn't defeated by\"]],\n", + " \"relation_suffix\": \"in the game\",\n", + " \"packed_predicates\": [\"was happy/wasn't happy\", \"was sad/wasn't sad\"],\n", + " \"reverse_causal\": True\n", + " },\n", + " {\n", + " \"index\": 16,\n", + " \"orig_sentence\": \"The large ball crashed right through the table because [it] was made of steel/styrofoam.\",\n", + " \"entities\": [\"ball\", \"board\"],\n", + " \"entity_substitutes\": [[\"bullet\", \"arrow\"], [\"shield\", \"disk\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"crashed right through/didn't crash through\", \"failed to block/blocked\"],\n", + " \"packed_relation_substitutes\": [[\"penetrated through/didn't penetrate 
through\"], [\"failed to stop/stopped\"]],\n", + " \"packed_predicates\": [\"was hard/wasn't hard\", \"was soft/wasn't soft\"],\n", + " },\n", + " {\n", + " \"index\": 18,\n", + " \"orig_sentence\": \"John couldn't see the stage with Billy in front of him because [he] is so short.\",\n", + " \"entities\": [\"John\", \"Susan\"],\n", + " \"entity_substitutes\": [[\"David\", \"Edward\"], [\"Betty\", \"Donna\"]],\n", + " \"packed_relations\": [\"couldn't see the stage behind/could see the stage behind\", \"blocked the view of/didn't block the view of\"],\n", + " \"packed_relation_substitutes\": [[\"couldn't catch sight of the stage behind/could catch sight of the stage behind\"], [\"obstructed the sight of/didn't obstruct the sight of\"]],\n", + " \"packed_predicates\": [\"is short/isn't short\", \"is tall/isn't tall\"],\n", + " },\n", + " {\n", + " \"index\": 20,\n", + " \"orig_sentence\": \"Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.\",\n", + " \"entities\": [\"Brian\", \"Amy\"],\n", + " \"entity_substitutes\": [[\"Charles\", \"Paul\"], [\"Emma\", \"Linda\"]],\n", + " \"packed_relations\": [\"threw the schoolbag down to/threw the schoolbag up to\", \"caught the schoolbag thrown down by/caught the schoolbag thrown up by\"],\n", + " \"packed_relation_substitutes\": [[\"cast the schoolbag down to/cast the schoolbag up to\"], [\"took the schoolbag thrown down by/took the schoolbag thrown up by\"]],\n", + " \"packed_predicates\": [\"reached the top of the stairs\", \"reached the bottom of the stairs\"],\n", + " \"conjunctions\": [[\"after\", ], [\"before\", ]]\n", + " },\n", + " ## didn't defeated, replace error: didn't defeat -> defeated\n", + " {\n", + " \"index\": 22,\n", + " \"orig_sentence\": \"Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.\",\n", + " \"entities\": [\"Tom\", \"Sue\"],\n", + " \"entity_substitutes\": [[\"John\", \"David\"], [\"Sally\", \"Susan\"]],\n", + " \"packed_relations\": [\"beat/didn't beat\", \"lost to/didn't lose to\"],\n", + " \"packed_relation_substitutes\": [[\"defeated/didn't defeat\"], [\"was defeated by/wasn't defeated by\"]],\n", + " \"relation_prefix\": \"Running at about the same speed,\",\n", + " \"relation_suffix\": \"in the running race\",\n", + " \"packed_predicates\": [\"had a good start/didn't have a good start\", \"had a bad start/didn't have a bad start\"],\n", + " },\n", + "# {\n", + "# \"index\": 26000,\n", + "# \"orig_sentence\": \"Sam's drawing was hung just above Tina's and [it] did look much better with another one below it\",\n", + "# \"entities\": [\"Bob\", \"Wendy\"],\n", + "# \"entity_substitutes\": [[\"Bush\", \"Tim\"], [\"Sandy\", \"Helen\"]],\n", + "# \"packed_relations\": [\"could reach higher than/couldn't reach higher than\", \"reached lower than/didn't reach lower than\"],\n", + "# \"packed_relation_substitutes\": [[\"could jump higher than/couldn't jump higher than\"], [\"jumped lower than/didn't jump lower than\"]],\n", + "# \"packed_predicates\": [\"is tall/is not tall\", \"is short/is not short\"],\n", + "# },\n", + " {\n", + " \"index\": 28,\n", + " \"orig_sentence\": \"Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.\",\n", + " \"entities\": [\"Anna\", \"Andy\"],\n", + " \"entity_substitutes\": [[\"Lucy\", \"Nancy\"], [\"George\", \"Frank\"]],\n", + " \"packed_relations\": [\"did better than/didn't do better than\", \"did worse than/didn't do worse than\"],\n", + " \"packed_relation_substitutes\": 
[[\"performed better than/didn't perform better than\"], [\"performed worse than/didn't perform worse than\"]],\n", + " \"relation_suffix\": \"on the test\",\n", + " \"packed_predicates\": [\"had studied hard/hadn't studied hard\", \"was lazy in doing homework/wasn't lazy in doing homework\"],\n", + " },\n", + " {\n", + " \"index\": 30,\n", + " \"orig_sentence\": \"The firemen arrived after the police because [they] were coming from so far away.\",\n", + " \"entities\": [\"doctor\", \"police\"],\n", + " \"entity_substitutes\": [[\"worker\", \"employee\"], [\"boss\", \"administrator\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"arrived after/didn't arrive after\", \"arrived before/didn't arrive before\"],\n", + " \"packed_relation_substitutes\": [[\"reached here after/didn't reach here after\"], [\"reached here before/didn't reach here before\"]],\n", + " \"packed_predicates\": [\"came from far away/didn't come from far away\", \"came from a close place/didn't come from a close place\"],\n", + " },\n", + " {\n", + " \"index\": 32000,\n", + " \"orig_sentence\": \"Frank was upset with Tom because the toaster [he] had bought from him didn't work.\",\n", + " \"entities\": [\"Betty\", \"Henry\"],\n", + " \"entity_substitutes\": [[\"Amy\", \"Linda\"], [\"Bush\", \"Frank\"]],\n", + " \"packed_relations\": [\"was upset with/was pleased with\", \"was hated by/was loved by\"],\n", + " \"packed_relation_substitutes\": [[\"hated/liked\"], [\"was disliked by/was liked by\"]],\n", + " \"packed_predicates\": [\"had bought didn't work/had bought worked well\", \"had sold didn't work/had sold worked well\"],\n", + " \"predicate_prefix\": \"the toaster\",\n", + " \"predicate_dichotomy\": False,\n", + " },\n", + " {\n", + " \"index\": 36,\n", + " \"orig_sentence\": \"The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first\",\n", + " \"entities\": [\"potatoes\", \"flour\"],\n", + " \"entity_substitutes\": [[\"candy\", \"rice\"], [\"beans\", \"noodles\"]],\n", + " \"determiner\": \"the bag of\",\n", + " \"packed_relations\": [\"had been placed above/hadn't been placed above\", \"had been placed below/hadn't been placed below\"],\n", + " \"packed_relation_substitutes\": [[\"had been put above/hadn't been put above\"], [\"had been put below/hadn't been put below\"]],\n", + " \"packed_predicates\": [\"had to be moved first/couldn't be moved first\", \"had to be moved later/couldn't be moved later\"],\n", + " \"reverse_causal\": True\n", + " },\n", + " {\n", + " \"index\": 38,\n", + " \"orig_sentence\": \"Pete envies Martin although [he] is very successful.\",\n", + " \"entities\": [\"Peter\", \"Mandy\"],\n", + " \"entity_substitutes\": [[\"Martin\", \"Paul\"], [\"Cindy\", \"Emma\"]],\n", + " \"packed_relations\": [\"envied/didn't envy\", \"was envied by/wasn't envied by\"],\n", + " \"packed_relation_substitutes\": [[\"was jealous of/wasn't jealous of\"], [\"was admired by/wasn't admired by\"]],\n", + " \"packed_predicates\": [\"failed/didn't fail\", \"was successful/wasn't successful\"],\n", + " },\n", + "# {\n", + "# \"index\": 420000,\n", + "# \"orig_sentence\": \"I poured water from the bottle into the cup until [it] was empty.\",\n", + "# \"entities\": [\"bottle\", \"cup\"],\n", + "# \"entity_substitutes\": [[\"bow\", \"bucket\"], [\"tube\", \"container\"]],\n", + "# \"determiner\": \"the\",\n", + "# \"packed_relations\": [\"is filled with the water from/isn't filled with the water from\", \"leakes the water into/doesn't leakes the water into\"],\n", 
+ "# \"packed_relation_substitutes\": [[\"is full of the water from/isn't full of the water from\"], [\"drains the water into/doesn't drain the water into\"]],\n", + "# \"packed_predicates\": [\"was lower/wasn't lower\", \"was higher/wasn't higher\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# }, \n", + " {\n", + " \"index\": 42,\n", + " \"orig_sentence\": \"I poured water from the bottle into the cup until [it] was empty.\",\n", + " \"entities\": [\"bottle\", \"cup\"],\n", + " \"entity_substitutes\": [[\"bowl\", \"bucket\"], [\"tube\", \"container\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"was filled with water from/leaked into\", \"leaked into/was filled with water from\"],\n", + " \"packed_relation_substitutes\": [[\"was suffused with water from/dripped water into\"], [\"dripped water into/was suffused with water from\"]],\n", + " \"packed_predicates\": [\"was empty\", \"was full\"],\n", + " \"conjunctions\": [[\"after\", ], [\"before\", ]]\n", + " },\n", + "# {\n", + "# \"index\": 46000,\n", + "# \"orig_sentence\": \"Sid explained his theory to Mark but [he] couldn't convince him.\",\n", + "# \"entities\": [\"Susan\", \"Mark\"],\n", + "# \"entity_substitutes\": [[\"Amy\", \"Linda\"], [\"David\", \"Michael\"]],\n", + "# \"packed_relations\": [\"is explaining the theory to/doesn't explain the theory\", \"is listening to the explanation of/doesn't listen to the explanation of\"],\n", + "# \"packed_relation_substitutes\": [[\"is illustrating the theory to/doesn't illustrate the theory\"], [\"is paying attention to the explanation of/doesn't pay attention to the explanation of\"]],\n", + "# \"packed_predicates\": [\"has already proved it/doesn't prove it\", \"doesn't understand it/understands it\"],\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 48,\n", + "# \"orig_sentence\": \"Susan knew that Ann's son had been in a car accident, so [she] told her about it.\",\n", + "# \"entities\": [\"Sandy\", \"Mark\"],\n", + "# \"entity_substitutes\": [[\"Mandy\", \"Betty\"], [\"Bob\", \"Charles\"]],\n", + "# \"packed_relations\": [\"found/didn't find\", \"was found by/wasn't found by\"],\n", + "# \"packed_relation_substitutes\": [[\"caught/didn't catch\"], [\"was caught by/wasn't caught by\"]],\n", + "# \"relation_suffix\": \"cheating on the test\",\n", + "# \"packed_predicates\": [\"felt unfair/didn't feel unfair\", \"felt nervous/didn't feel nervous\"],\n", + "# },\n", + " {\n", + " \"index\": 50,\n", + " \"orig_sentence\": \"Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.\",\n", + " \"entities\": [\"Joe\", \"Amy\"],\n", + " \"entity_substitutes\": [[\"David\", \"Charles\"], [\"Betty\", \"Cindy\"]],\n", + " \"packed_relations\": [\"can beat/can't beat\", \"often loses to/seldom loses to\"],\n", + " \"packed_relation_substitutes\": [[\"can defeat/can't defeat\"], [\"is often defeated by/is seldom defeated by\"]],\n", + " \"relation_suffix\": \"at tennis\",\n", + " \"packed_predicates\": [\"is older/isn't older\", \"is younger/isn't younger\"],\n", + " },\n", + "# {\n", + "# \"index\": 64000,\n", + "# \"orig_sentence\": \"In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.\",\n", + "# \"entities\": [\"concert\", \"rain\"],\n", + "# \"entity_substitutes\": [[\"event\", \"race\"], [\"storm\", \"shower\"]],\n", + "# \"determiner\": \"the\",\n", + "# \"packed_relations\": [\"was interrupted by/wasn't interrupted by\", \"interrupted/didn't interrupt\"],\n", + "# 
\"packed_relation_substitutes\": [[\"was affected by/wasn't affected by\"], [\"affected/didn't affected\"]],\n", + "# \"packed_predicates\": [\"ended early/ended on time\", \"was heavy/stopped soon\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# },\n", + "# {\n", + "# \"index\": 680,\n", + "# \"orig_sentence\": \"Ann asked Mary what time the library closes, because [she] had forgotten.\",\n", + "# \"entities\": [\"Ann\", \"Henry\"],\n", + "# \"entity_substitutes\": [[\"Mary\", \"Linda\"], [\"Brian\", \"Michael\"]],\n", + "# \"packed_relations\": [\"asked/didn't ask\", \"was asked by/wasn't asked by\"],\n", + "# \"packed_relation_substitutes\": [[\"querid/didn't query\"], [\"was querid by/wasn't querid by\"]],\n", + "# \"relation_suffix\": \"what time the library closes\",\n", + "# \"packed_predicates\": [\"forgot/didn't forget\", \"remembered/didn't remember\"],\n", + "# \"reverse_causal\": True\n", + "# },\n", + " {\n", + " \"index\": 68,\n", + " \"orig_sentence\": \"Ann asked Mary what time the library closes, because [she] had forgotten.\",\n", + " \"entities\": [\"Ann\", \"Henry\"],\n", + " \"entity_substitutes\": [[\"Mary\", \"Linda\"], [\"Brian\", \"Michael\"]],\n", + " \"packed_relations\": [\"asked/didn't ask\", \"told/didn't tell\"],\n", + " \"packed_relation_substitutes\": [[\"was told by/wasn't told by\"], [\"was asked by/wasn't asked by\"]],\n", + " \"relation_suffix\": \"what time the library closes\",\n", + " \"packed_predicates\": [\"had forgotten/hadn't forgotten\", \"remembered/didn't remember\"],\n", + " },\n", + "# {\n", + "# \"index\": 840,\n", + "# \"orig_sentence\": \"If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.\",\n", + "# \"entities\": [\"Sam\", \"Emma\"],\n", + "# \"entity_substitutes\": [[\"Paul\", \"Bush\"], [\"Susan\", \"Lucy\"]],\n", + "# \"packed_relations\": [\"succeeded in fooling/failed to fool\", \"was fooled by/wasn't fooled by\"],\n", + "# \"packed_relation_substitutes\": [[\"succeeded in cheating/failed to cheat\"], [\"was cheated by/wasn't cheated by\"]],\n", + "# \"packed_predicates\": [\"got the prize/didn't get the prize\", \"lost the prize/didn't lose the prize\"],\n", + "# \"predicate_dichotomy\": True,\n", + "# \"reverse_causal\": True\n", + "# }, \n", + " {\n", + " \"index\": 84,\n", + " \"orig_sentence\": \"If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.\",\n", + " \"entities\": [\"Sam\", \"Emma\"],\n", + " \"entity_substitutes\": [[\"Paul\", \"Bush\"], [\"Susan\", \"Lucy\"]],\n", + " \"packed_relations\": [\"succeeded in fooling/failed to fool\", \"was fooled by/wasn't fooled by\"],\n", + " \"packed_relation_substitutes\": [[\"succeeded in cheating/failed to cheat\"], [\"was cheated by/wasn't cheated by\"]],\n", + " \"packed_predicates\": [\"got a lot of money/didn't get a lot of money\", \"lost a lot of money/didn't lose a lot of money\"],\n", + " \"predicate_dichotomy\": False,\n", + " \"reverse_causal\": True\n", + " }, \n", + "# {\n", + "# \"index\": 92000,\n", + "# \"orig_sentence\": \"Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.\",\n", + "# \"entities\": [\"Alice\", \"Emma\"],\n", + "# \"entity_substitutes\": [[\"Paul\", \"Bush\"], [\"Susan\", \"Lucy\"]],\n", + "# \"packed_relations\": [\"didn't allow her daughter/allowed her daughter\", \"wasn't allowed by her father/was allowed by her father\"],\n", + "# \"packed_relation_substitutes\": [[\"didn't approve her 
daughter/approved her daughter\"], [\"wasn't approved by/was approved by\"]],\n", + "# \"relation_suffix\": \"to go to the party\",\n", + "# \"packed_predicates\": [\"was severe/wasn't severe\", \"was naughty/behaved well\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": False\n", + "# }, \n", + "# {\n", + "# \"index\": 98,\n", + "# \"orig_sentence\": \"I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.\",\n", + "# \"entities\": [\"hole\", \"gum\"],\n", + "# \"entity_substitutes\": [[\"can\", \"box\"], [\"clay\", \"soil\"]],\n", + "# \"determiner\": \"the\",\n", + "# \"packed_relations\": [\"was filled with/wasn't filled with\", \"clogged/didn't clog\"],\n", + "# \"packed_relation_substitutes\": [[\"was full of/wasn't full of\"], [\"stucked/didn't stuck\"]],\n", + "# \"packed_predicates\": [\"couldn't be gotten in/could be gotten in\", \"should be cleaned up/should't be cleaned up\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 100,\n", + "# \"orig_sentence\": \"The dog chased the cat, which ran up a tree. [It] waited at the bottom.\",\n", + "# \"entities\": [\"tigher\", \"cat\"],\n", + "# \"entity_substitutes\": [[\"fox\", \"weasel\"], [\"hen\", \"rooster\"]],\n", + "# \"determiner\": \"the\",\n", + "# \"packed_relations\": [\"chased/didn't chase\", \"was chased by/wasn't chased by\"],\n", + "# \"packed_relation_substitutes\": [[\"hunted for/didn't hunt for\"], [\"was hunted by/wasn't hunted by\"]],\n", + "# \"relation_suffix\": \"until the tree\",\n", + "# \"packed_predicates\": [\"waited at the bottom of it/didn't waited at the bottom of it\", \"stayed at the top of it/didn't stay at the top of it\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 106,\n", + "# \"orig_sentence\": \"John was doing research in the library when he heard a man humming and whistling. [He] was very annoyed.\",\n", + "# \"entities\": [\"Bob\", \"Tiffany\"],\n", + "# \"entity_substitutes\": [[\"Jack\", \"Ted\"], [\"Mary\", \"Lucy\"]],\n", + "# \"packed_relations\": [\"heard/didn't hear\", \"was heard by/didn't heard by\"],\n", + "# \"packed_relation_substitutes\": [[\"noticed/didn't notice\"], [\"was noticed by/wasn't noticed by\"]],\n", + "# \"relation_suffix\": \"whistle in the library\",\n", + "# \"packed_predicates\": [\"was annoyed/wasn't annoyed\", \"was annoying/wasn't annoying\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 108000,\n", + "# \"orig_sentence\": \"John was jogging through the park when he saw a man juggling watermelons. [He] was very impressed.\",\n", + "# \"entities\": [\"John\", \"Amy\"],\n", + "# \"entity_substitutes\": [[\"Alice\", \"Bush\"], [\"Nancy\", \"Cindy\"]],\n", + "# \"packed_relations\": [\"accompanied with/didn't accompany with\", \"was accompanied by/wasn't accompanied by\"],\n", + "# \"packed_relation_substitutes\": [[\"stayed with/didn't stay with\"], [\"was't left alone by/was left alone by\"]],\n", + "# \"packed_predicates\": [\"is nice/isn't nice\", \"didn't feel lonely/felt lonely\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 132000,\n", + "# \"orig_sentence\": \"Jane knocked on the door, and Susan answered it. 
[She] invited her to come out.\",\n", +    "#         \"entities\": [\"Jane\", \"Wendy\"],\n", +    "#         \"entity_substitutes\": [[\"Bob\", \"Tony\"], [\"Lily\", \"Lucy\"]],\n", +    "#         \"determiner\": \"\",\n", +    "#         \"packed_relations\": [\"knocked on the door and heard the answer from/didn't knock on the door and didn't hear the answer from\", \"answered the knock from/didn't answer the knock from\"],\n", +    "#         \"packed_relation_substitutes\": [[\"rang at the door and heard the answer from/didn't ring at the door and didn't hear the answer from\"], [\"answered the ring from/didn't answer the ring from\"]],\n", +    "#         \"relation_suffix\": \"\",\n", +    "#         \"packed_predicates\": [\"went in/didn't go in\", \"unlocked the door/didn't unlock the door\"],\n", +    "#         \"predicate_dichotomy\": False,\n", +    "#         \"reverse_causal\": True\n", +    "#     },\n", +    "#     {\n", +    "#         \"index\": 150,\n", +    "#         \"orig_sentence\": \"Jackson was greatly influenced by Arnold, though [he] lived two centuries later.\",\n", +    "#         \"entities\": [\"Jack\", \"Betty\"],\n", +    "#         \"entity_substitutes\": [[\"Tom\", \"Jay\"], [\"Emily\", \"Helen\"]],\n", +    "#         \"packed_relations\": [\"was influenced by/wasn't influenced by\", \"influenced/didn't influence\"],\n", +    "#         \"packed_relation_substitutes\": [[\"was inspired by/wasn't inspired by\"], [\"inspired/didn't inspire\"]],\n", +    "#         \"packed_predicates\": [\"lived two centuries later/didn't live two centuries later\", \"lived two centuries earlier/didn't live two centuries earlier\"],\n", +    "#         \"predicate_dichotomy\": False,\n", +    "#     },\n", +    "    {\n", +    "        \"index\": 15000,\n", +    "        \"orig_sentence\": \"Jackson was greatly influenced by Arnold, though [he] lived two centuries later.\",\n", +    "        \"entities\": [\"Jack\", \"Betty\"],\n", +    "        \"entity_substitutes\": [[\"Tom\", \"Jay\"], [\"Emily\", \"Helen\"]],\n", +    "        \"packed_relations\": [\"always takes care of/doesn't take care of\", \"is always taken care of by/isn't taken care of by\"],\n", +    "        \"packed_relation_substitutes\": [[\"always looks after/doesn't look after\"], [\"always needs the help of/doesn't need the help of\"]],\n", +    "        \"packed_predicates\": [\"is older/isn't older\", \"is younger/isn't younger\"],\n", +    "    },\n", +    "    {\n", +    "        \"index\": 160,\n", +    "        \"orig_sentence\": \"The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.\",\n", +    "        \"entities\": [\"Betty\", \"Adele\"],\n", +    "        \"entity_substitutes\": [[\"Amy\", \"Cindy\"], [\"Alberta\", \"Caroline\"]],\n", +    "        \"packed_relations\": [\"replaced/didn't replace\", \"was changed to/wasn't changed to\"],\n", +    "        \"packed_relation_substitutes\": [[\"was substituted for/wasn't substituted for\"], [\"was replaced by/wasn't replaced by\"]],\n", +    "        \"relation_suffix\": \"as the actress's new name\",\n", +    "        \"packed_predicates\": [\"is easy to pronounce/isn't easy to pronounce\", \"is hard to pronounce/isn't hard to pronounce\"],\n", +    "    },\n", +    "#     {\n", +    "#         \"index\": 1660000,\n", +    "#         \"orig_sentence\": \"Fred is the only man still alive who remembers my great-grandfather. 
[He] is a remarkable man.\",\n", + "# \"entities\": [\"Tom\", \"grandmother\"],\n", + "# \"entity_substitutes\": [[\"Tim\", \"Mark\"], [\"grandma\", \"mother\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"still remembers/doesn't remember\", \"is remembered by/isn't remembered by\"],\n", + "# \"packed_relation_substitutes\": [[\"still recollect/doesn't recollect\"], [\"is recollected by/isn't recollected by\"]],\n", + "# \"relation_suffix\": \"\",\n", + "# \"packed_predicates\": [\"has good memory/doesn't have good memory\", \"was remarkable/wasn't remarkable\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + " {\n", + " \"index\": 1700000,\n", + " \"orig_sentence\": \"In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.\",\n", + " \"entities\": [\"Germany\", \"Italy\"],\n", + " \"entity_substitutes\": [[\"Australia\", \"Japan\"], [\"Argentina\", \"Canada\"]],\n", + " \"packed_relations\": [\"defeated/didn't defeat\", \"was defeated by/wasn't defeated by\"],\n", + " \"packed_relation_substitutes\": [[\"conquered/didn't conquer\"], [\"was conquered by/wasn't conquered by\"]],\n", + " \"packed_predicates\": [\"was more powerful/wasn't more powerful\", \"was less powerful/wasn't less powerful\"],\n", + " },\n", + " {\n", + " \"index\": 186,\n", + " \"orig_sentence\": \"When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority\",\n", + " \"entities\": [\"sponsors\", \"opponents\"],\n", + " \"entity_substitutes\": [[\"workers\", \"customers\"], [\"teachers\", \"students\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"were less in number than/were not less in number than\", \"were more in number than/were not more in number than\"],\n", + " \"packed_relation_substitutes\": [[\"were outnumbered by/were not outnumbered by\"], [\"outnumbered/didn't outnumber\"]],\n", + " \"packed_predicates\": [\"were in the minority/were not in the minority\", \"were in the majority/were not in the majority\"],\n", + " \"reverse_causal\": True\n", + " },\n", + " {\n", + " \"index\": 188,\n", + " \"orig_sentence\": \"Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. 
Next time, we should make more of [them] .\",\n", + " \"entities\": [\"cookies\", \"chips\"],\n", + " \"entity_substitutes\": [[\"apples\", \"bananas\"], [\"grapes\", \"sandwiches\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"are more popular than/are less popular than\", \"lose to/don't lose to\"],\n", + " \"packed_relation_substitutes\": [[\"are sold more than/are sold less than\"], [\"are not as popular as/are as popular as\"]],\n", + " \"packed_predicates\": [\"should be made more next time/shouldn't be made more next time\", \"should be made less next time/shouldn't be made less next time\"],\n", + " \"reverse_causal\": True\n", + " },\n", + "# {\n", + "# \"index\": 1900,\n", + "# \"orig_sentence\": \"We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .\",\n", + "# \"entities\": [\"newspapers\", \"chairs\"],\n", + "# \"entity_substitutes\": [[\"cups\", \"pictures\"], [\"tables\", \"benches\"]],\n", + "# \"determiner\": \"the\",\n", + "# \"packed_relations\": [\"could be placed on all/couldn't be placed on all\", \"could hold all/couldn't hold all\"],\n", + "# \"packed_relation_substitutes\": [[\"could be put on all/couldn't be put on all\"], [\"could carry all/couldn't carry all\"]],\n", + "# \"relation_suffix\": \"in the auditorium\",\n", + "# \"packed_predicates\": [\"isn't too many/is too many\", \"is enough/isn't enough\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + " {\n", + " \"index\": 190,\n", + " \"orig_sentence\": \"We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .\",\n", + " \"entities\": [\"newspapers\", \"chairs\"],\n", + " \"entity_substitutes\": [[\"cups\", \"pictures\"], [\"tables\", \"benches\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"could be placed on all/couldn't be placed on all\", \"could all be covered by/couldn't all be covered by\"],\n", + " \"packed_relation_substitutes\": [[\"could be put on all/couldn't be put on all\"], [\"could carry all/couldn't carry all\"]],\n", + " \"packed_predicates\": [\"there were many of/there were not many of\", \"there were few of/there were not few of\"],\n", + " \"prepositive_pred\": True,\n", + " },\n", + " {\n", + " \"index\": 19600,\n", + " \"orig_sentence\": \"Steve follows Fred's example in everything. 
[He] admires him hugely.\",\n", + " \"entities\": [\"Steve\", \"Lucy\"],\n", + " \"entity_substitutes\": [[\"Fred\", \"George\"], [\"Lily\", \"Wendy\"]],\n", + " \"packed_relations\": [\"follows/doesn't follow\", \"is followed by/isn't followed by\"],\n", + " \"packed_relation_substitutes\": [[\"imitates/doesn't imitate\"], [\"is imitated by/isn't imitated by\"]],\n", + " \"relation_suffix\": \"in everything\",\n", + " \"packed_predicates\": [\"is bad at making decisions/isn't bad at making decisions\", \"is good at making decisions/isn't good at making decisions\"],\n", + " },\n", + " {\n", + " \"index\": 198,\n", + " \"orig_sentence\": \"The table won't fit through the doorway because [it] is too wide.\",\n", + " \"entities\": [\"table\", \"doorway\"],\n", + " \"entity_substitutes\": [[\"desk\", \"sofa\"], [\"corridor\", \"hallway\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"will fit through/won't fit through\", \"will be fitted through by/won't be fitted through by\"],\n", + " \"packed_relation_substitutes\": [[\"will pass through/won't pass through\"], [\"will be passed through by/won't be passed through by\"]],\n", + " \"packed_predicates\": [\"is narrow/isn't narrow\", \"is wide/isn't wide\"],\n", + " },\n", + "# {\n", + "# \"index\": 2000,\n", + "# \"orig_sentence\": \"Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.\",\n", + "# \"entities\": [\"Mandy\", \"Tim\"],\n", + "# \"entity_substitutes\": [[\"Betty\", \"Nancy\"], [\"Bob\", \"John\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"traded the sweater with/didn't trade the sweater with\", \"traded the T-shirt for the sweater with/didn't trade the T-shirt for the sweater with\"],\n", + "# \"packed_relation_substitutes\": [[\"exchanged the sweater with/didn't exchange the sweater with\"], [\"exchanged the T-shirt for the sweater with/didn't exchange the T-shirt for the sweater with\"]],\n", + "# \"relation_suffix\": \"\",\n", + "# \"packed_predicates\": [\"thinks it looks bad/thinks it doesn't look bad\", \"thinks it looks great/thinks it doesn't look great\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": False\n", + "# },\n", + " {\n", + " \"index\": 2000000,\n", + " \"orig_sentence\": \"Grace was happy to trade me her sweater for my jacket. 
She thinks [it] looks dowdy on her.\",\n", + " \"entities\": [\"sweater\", \"jacket\"],\n", + " \"entity_substitutes\": [[\"skirt\", \"cap\"], [\"hat\", \"short\"]],\n", + " \"determiner\": \"the\",\n", + " \"packed_relations\": [\"is traded by Grace for/isn't traded by Grace for\", \"is substituted by Grace for/isn't substituted by Grace for\"],\n", + " \"packed_relation_substitutes\": [[\"is replaced by Grace with/isn't replaced by Grace with\"], [\"is preferred by Grace to/isn't preferred by Grace to\"]],\n", + " \"packed_predicates\": [\"looks bad/looks not bad\", \"looks good/looks not good\"],\n", + " \"predicate_prefix\": \"she thinks\",\n", + " },\n", + "# {\n", + "# \"index\": 2020000,\n", + "# \"orig_sentence\": \"John hired Bill to take care of [him] .\",\n", + "# \"entities\": [\"Bill\", \"Mandy\"],\n", + "# \"entity_substitutes\": [[\"Ted\", \"Jackson\"], [\"Lily\", \"Peggy\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"hired/didn't hire\", \"was hired by/wasn't hired by\"],\n", + "# \"packed_relation_substitutes\": [[\"asked/didn't ask\"], [\"was asked by/wasn't asked by\"]],\n", + "# \"relation_suffix\": \"to take care of him\",\n", + "# \"packed_predicates\": [\"is sick/isn't sick\", \"is nice/isn't nice\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": False\n", + "# },\n", + "# {\n", + "# \"index\": 204,\n", + "# \"orig_sentence\": \"John promised Bill to leave, so an hour later [he] left.\",\n", + "# \"entities\": [\"Bill\", \"Mandy\"],\n", + "# \"entity_substitutes\": [[\"Ted\", \"Jackson\"], [\"Lily\", \"Peggy\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"promised/didn't promise\", \"was promised by/wasn't promised by\"],\n", + "# \"packed_relation_substitutes\": [[\"guaranteed/didn't guarantee\"], [\"was guaranteed by/wasn't guaranteed by\"]],\n", + "# \"relation_suffix\": \"to leave\",\n", + "# \"packed_predicates\": [\"left/didn't leave\", \"was left alone/wasn't left alone\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 2100000,\n", + "# \"orig_sentence\": \"Jane knocked on Susan's door but [she] did not get an answer.\",\n", + "# \"entities\": [\"Jane\", \"Frank\"],\n", + "# \"entity_substitutes\": [[\"Susan\", \"Sandy\"], [\"Tony\", \"Paul\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"called/didn't call\", \"was called by/wasn't called by\"],\n", + "# \"packed_relation_substitutes\": [[\"contected/didn't contect\"], [\"was contected by/wasn't contected by\"]],\n", + "# \"relation_suffix\": \"\",\n", + "# \"packed_predicates\": [\"got an answer/didn't get an answer\", \"answered the phone/didn't answer the phone\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": True\n", + "# },\n", + "# {\n", + "# \"index\": 212,\n", + "# \"orig_sentence\": \"Joe paid the detective after [he] received the final report on the case\",\n", + "# \"entities\": [\"Betty\", \"Peter\"],\n", + "# \"entity_substitutes\": [[\"Tina\", \"Donna\"], [\"Bush\", \"Billy\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"paid/didn't pay\", \"was paied by/wasn't paied by\"],\n", + "# \"packed_relation_substitutes\": [[\"gave money to/didn't give money to\"], [\"received money from/didn't receive money from\"]],\n", + "# \"relation_suffix\": \"\",\n", + "# \"packed_predicates\": [\"received the final report on the case/didn't receive the final report on the caser\", \"delivered the final 
report on the case/didn't deliver the final report on the case\"],\n", +    "#         \"predicate_dichotomy\": False,\n", +    "#         \"reverse_causal\": False\n", +    "#     },\n", +    "    {\n", +    "        \"index\": 226,\n", +    "        \"orig_sentence\": \"Bill passed the half-empty plate to John because [he] was full.\",\n", +    "        \"entities\": [\"Bill\", \"Amy\"],\n", +    "        \"entity_substitutes\": [[\"Brian\", \"David\"], [\"Emma\", \"Helen\"]],\n", +    "        \"packed_relations\": [\"passed the half-empty plate to/didn't pass the half-empty plate to\", \"received the half-empty plate from/didn't receive the half-empty plate from\"],\n", +    "        \"packed_relation_substitutes\": [[\"gave the half-empty plate to/didn't give the half-empty plate to\"], [\"took the half-empty plate from/didn't take the half-empty plate from\"]],\n", +    "        \"packed_predicates\": [\"was full/wasn't full\", \"was hungry/wasn't hungry\"],\n", +    "    },\n", +    "    {\n", +    "        \"index\": 252,\n", +    "        \"orig_sentence\": \"George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.\",\n", +    "        \"entities\": [\"George\", \"Linda\"],\n", +    "        \"entity_substitutes\": [[\"Eric\", \"Ted\"], [\"Cindy\", \"Lucy\"]],\n", +    "        \"packed_relations\": [\"gave the tickets of the play to/didn't give the tickets of the play to\", \"received the tickets of the play from/didn't receive the tickets of the play from\"],\n", +    "        \"packed_relation_substitutes\": [[\"sent the tickets of the play to/didn't send the tickets of the play to\"], [\"took the tickets of the play from/didn't take the tickets of the play from\"]],\n", +    "        \"packed_predicates\": [\"wasn't interested in it/was interested in it\", \"was eager to see it/wasn't eager to see it\"],\n", +    "    },\n", +    "#     {\n", +    "#         \"index\": 255,\n", +    "#         \"orig_sentence\": \"Jane gave Joan candy because [she] wasn't hungry.\",\n", +    "#         \"entities\": [\"Helen\", \"Ted\"],\n", +    "#         \"entity_substitutes\": [[\"Wendy\", \"Lucy\"], [\"Charles\", \"Billy\"]],\n", +    "#         \"determiner\": \"\",\n", +    "#         \"packed_relations\": [\"gave candy to/didn't give candy to\", \"received candy from/didn't receive candy from\"],\n", +    "#         \"packed_relation_substitutes\": [[\"delivered candy to/didn't deliver candy to\"], [\"accepted candy from/didn't accept candy from\"]],\n", +    "#         \"relation_suffix\": \"\",\n", +    "#         \"packed_predicates\": [\"was full/wasn't full\", \"was hungry/wasn't hungry\"],\n", +    "#         \"predicate_dichotomy\": False,\n", +    "#         \"reverse_causal\": False\n", +    "#     },\n", +    "#     {\n", +    "#         \"index\": 259,\n", +    "#         \"orig_sentence\": \"James asked Robert for a favor but [he] was refused.\",\n", +    "#         \"entities\": [\"James\", \"Amy\"],\n", +    "#         \"entity_substitutes\": [[\"Robert\", \"Jack\"], [\"Donna\", \"Emily\"]],\n", +    "#         \"determiner\": \"\",\n", +    "#         \"packed_relations\": [\"asked/didn't ask\", \"was asked by/wasn't asked by\"],\n", +    "#         \"packed_relation_substitutes\": [[\"queried/didn't query\"], [\"was queried by/wasn't queried by\"]],\n", +    "#         \"relation_suffix\": \"for a favor\",\n", +    "#         \"packed_predicates\": [\"was refused/wasn't refused\", \"refused/didn't refuse\"],\n", +    "#         \"predicate_dichotomy\": False,\n", +    "#         \"reverse_causal\": True\n", +    "#     },\n", +    "    {\n", +    "        \"index\": 261,\n", +    "        \"orig_sentence\": \"Kirilov ceded the presidency to Shatov because [he] was less popular.\",\n", +    "        \"entities\": [\"James\", \"Amy\"],\n", +    "        \"entity_substitutes\": [[\"Robert\", \"Jack\"], [\"Donna\", \"Emily\"]],\n", +    "        \"packed_relations\": [\"ceded the presidency to/didn't cede the presidency 
to\", \"took over the presidency from/didn't take over the presidency from\"],\n", + " \"packed_relation_substitutes\": [[\"gave the presidency to/didn't give the presidency to\"], [\"got the presidency from/didn't get the presidency from\"]],\n", + " \"packed_predicates\": [\"was notorious/was not notorious\", \"was popular/wasn't popular\"],\n", + " },\n", + "# {\n", + "# \"index\": 2630000,\n", + "# \"orig_sentence\": \"Emma did not pass the ball to Janie although [she] saw that she was open.\",\n", + "# \"entities\": [\"Emma\", \"Alberta\"],\n", + "# \"entity_substitutes\": [[\"Lily\", \"Nancy\"], [\"George\", \"Henry\"]],\n", + "# \"determiner\": \"\",\n", + "# \"packed_relations\": [\"passed the ball to/didn't pass the ball to\", \"was passed the ball by/wasn't passed the ball by\"],\n", + "# \"packed_relation_substitutes\": [[\"gave the ball to/didn't give the ball to\"], [\"was given the ball by/wasn't given the ball by\"]],\n", + "# \"relation_suffix\": \"\",\n", + "# \"packed_predicates\": [\"had enough strength/didn't have enough strength\", \"was open/wasn't open\"],\n", + "# \"predicate_dichotomy\": False,\n", + "# \"reverse_causal\": False\n", + "# },\n", + "]\n", + "len(frames)" + ] + }, + { + "cell_type": "code", + "execution_count": 406, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "causal_sentences, turning_sentences, substituted_sent_groups = \\\n", + " make_sentences(A_template, B_template, causal_templates, turning_templates, **get_frame(frames, 2))" + ] + }, + { + "cell_type": "code", + "execution_count": 407, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[\"the trophy doesn't fit into the suitcase because the [trophy] is large.\",\n", + " \"the trophy doesn't fit into the suitcase because the [suitcase] is small.\",\n", + " \"the trophy doesn't fit into the suitcase because the [trophy] isn't small.\",\n", + " \"the trophy doesn't fit into the suitcase because the [suitcase] isn't large.\",\n", + " \"the suitcase doesn't hold the trophy because the [trophy] is large.\",\n", + " \"the suitcase doesn't hold the trophy because the [suitcase] is small.\",\n", + " \"the suitcase doesn't hold the trophy because the [trophy] isn't small.\",\n", + " \"the suitcase doesn't hold the trophy because the [suitcase] isn't large.\",\n", + " \"the trophy can fit into the suitcase because the [trophy] isn't large.\",\n", + " \"the trophy can fit into the suitcase because the [suitcase] isn't small.\",\n", + " 'the trophy can fit into the suitcase because the [trophy] is small.',\n", + " 'the trophy can fit into the suitcase because the [suitcase] is large.',\n", + " \"the suitcase can hold the trophy because the [trophy] isn't large.\",\n", + " \"the suitcase can hold the trophy because the [suitcase] isn't small.\",\n", + " 'the suitcase can hold the trophy because the [trophy] is small.',\n", + " 'the suitcase can hold the trophy because the [suitcase] is large.']" + ] + }, + "execution_count": 407, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "[\"the trophy doesn't fit into the suitcase although the [trophy] isn't large.\",\n", + " \"the trophy doesn't fit into the suitcase although the [suitcase] isn't small.\",\n", + " \"the trophy doesn't fit into the suitcase although the [trophy] is small.\",\n", + " \"the trophy doesn't fit into the suitcase although the [suitcase] is large.\",\n", + " \"the suitcase doesn't hold the trophy although the [trophy] isn't 
large.\",\n", + " \"the suitcase doesn't hold the trophy although the [suitcase] isn't small.\",\n", + " \"the suitcase doesn't hold the trophy although the [trophy] is small.\",\n", + " \"the suitcase doesn't hold the trophy although the [suitcase] is large.\",\n", + " 'the trophy can fit into the suitcase although the [trophy] is large.',\n", + " 'the trophy can fit into the suitcase although the [suitcase] is small.',\n", + " \"the trophy can fit into the suitcase although the [trophy] isn't small.\",\n", + " \"the trophy can fit into the suitcase although the [suitcase] isn't large.\",\n", + " 'the suitcase can hold the trophy although the [trophy] is large.',\n", + " 'the suitcase can hold the trophy although the [suitcase] is small.',\n", + " \"the suitcase can hold the trophy although the [trophy] isn't small.\",\n", + " \"the suitcase can hold the trophy although the [suitcase] isn't large.\"]" + ] + }, + "execution_count": 407, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "32" + ] + }, + "execution_count": 407, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "text/plain": [ + "[[\"the ball doesn't fit into the bag because the [ball] is large.\",\n", + " \"the ball doesn't fit into the box because the [ball] is large.\",\n", + " \"the toy doesn't fit into the bag because the [toy] is large.\",\n", + " \"the toy doesn't fit into the box because the [toy] is large.\",\n", + " \"the ball can't be put into the bag because the [ball] is large.\",\n", + " \"the ball can't be put into the box because the [ball] is large.\",\n", + " \"the toy can't be put into the bag because the [toy] is large.\",\n", + " \"the toy can't be put into the box because the [toy] is large.\"],\n", + " [\"the ball doesn't fit into the bag because the [bag] is small.\",\n", + " \"the ball doesn't fit into the box because the [box] is small.\",\n", + " \"the toy doesn't fit into the bag because the [bag] is small.\",\n", + " \"the toy doesn't fit into the box because the [box] is small.\",\n", + " \"the ball can't be put into the bag because the [bag] is small.\",\n", + " \"the ball can't be put into the box because the [box] is small.\",\n", + " \"the toy can't be put into the bag because the [bag] is small.\",\n", + " \"the toy can't be put into the box because the [box] is small.\"],\n", + " [\"the ball doesn't fit into the bag because the [ball] isn't small.\",\n", + " \"the ball doesn't fit into the box because the [ball] isn't small.\",\n", + " \"the toy doesn't fit into the bag because the [toy] isn't small.\",\n", + " \"the toy doesn't fit into the box because the [toy] isn't small.\",\n", + " \"the ball can't be put into the bag because the [ball] isn't small.\",\n", + " \"the ball can't be put into the box because the [ball] isn't small.\",\n", + " \"the toy can't be put into the bag because the [toy] isn't small.\",\n", + " \"the toy can't be put into the box because the [toy] isn't small.\"],\n", + " [\"the ball doesn't fit into the bag because the [bag] isn't large.\",\n", + " \"the ball doesn't fit into the box because the [box] isn't large.\",\n", + " \"the toy doesn't fit into the bag because the [bag] isn't large.\",\n", + " \"the toy doesn't fit into the box because the [box] isn't large.\",\n", + " \"the ball can't be put into the bag because the [bag] isn't large.\",\n", + " \"the ball can't be put into the box because the [box] isn't large.\",\n", + " \"the toy can't be put into the bag because the [bag] isn't 
large.\",\n", + " \"the toy can't be put into the box because the [box] isn't large.\"],\n", + " [\"the bag doesn't hold the ball because the [ball] is large.\",\n", + " \"the box doesn't hold the ball because the [ball] is large.\",\n", + " \"the bag doesn't hold the toy because the [toy] is large.\",\n", + " \"the box doesn't hold the toy because the [toy] is large.\",\n", + " \"the bag doesn't have enough room for the ball because the [ball] is large.\",\n", + " \"the box doesn't have enough room for the ball because the [ball] is large.\",\n", + " \"the bag doesn't have enough room for the toy because the [toy] is large.\",\n", + " \"the box doesn't have enough room for the toy because the [toy] is large.\"],\n", + " [\"the bag doesn't hold the ball because the [bag] is small.\",\n", + " \"the box doesn't hold the ball because the [box] is small.\",\n", + " \"the bag doesn't hold the toy because the [bag] is small.\",\n", + " \"the box doesn't hold the toy because the [box] is small.\",\n", + " \"the bag doesn't have enough room for the ball because the [bag] is small.\",\n", + " \"the box doesn't have enough room for the ball because the [box] is small.\",\n", + " \"the bag doesn't have enough room for the toy because the [bag] is small.\",\n", + " \"the box doesn't have enough room for the toy because the [box] is small.\"],\n", + " [\"the bag doesn't hold the ball because the [ball] isn't small.\",\n", + " \"the box doesn't hold the ball because the [ball] isn't small.\",\n", + " \"the bag doesn't hold the toy because the [toy] isn't small.\",\n", + " \"the box doesn't hold the toy because the [toy] isn't small.\",\n", + " \"the bag doesn't have enough room for the ball because the [ball] isn't small.\",\n", + " \"the box doesn't have enough room for the ball because the [ball] isn't small.\",\n", + " \"the bag doesn't have enough room for the toy because the [toy] isn't small.\",\n", + " \"the box doesn't have enough room for the toy because the [toy] isn't small.\"],\n", + " [\"the bag doesn't hold the ball because the [bag] isn't large.\",\n", + " \"the box doesn't hold the ball because the [box] isn't large.\",\n", + " \"the bag doesn't hold the toy because the [bag] isn't large.\",\n", + " \"the box doesn't hold the toy because the [box] isn't large.\",\n", + " \"the bag doesn't have enough room for the ball because the [bag] isn't large.\",\n", + " \"the box doesn't have enough room for the ball because the [box] isn't large.\",\n", + " \"the bag doesn't have enough room for the toy because the [bag] isn't large.\",\n", + " \"the box doesn't have enough room for the toy because the [box] isn't large.\"],\n", + " [\"the ball can fit into the bag because the [ball] isn't large.\",\n", + " \"the ball can fit into the box because the [ball] isn't large.\",\n", + " \"the toy can fit into the bag because the [toy] isn't large.\",\n", + " \"the toy can fit into the box because the [toy] isn't large.\",\n", + " \"the ball can be put into the bag because the [ball] isn't large.\",\n", + " \"the ball can be put into the box because the [ball] isn't large.\",\n", + " \"the toy can be put into the bag because the [toy] isn't large.\",\n", + " \"the toy can be put into the box because the [toy] isn't large.\"],\n", + " [\"the ball can fit into the bag because the [bag] isn't small.\",\n", + " \"the ball can fit into the box because the [box] isn't small.\",\n", + " \"the toy can fit into the bag because the [bag] isn't small.\",\n", + " \"the toy can fit into the box because the [box] 
isn't small.\",\n", + " \"the ball can be put into the bag because the [bag] isn't small.\",\n", + " \"the ball can be put into the box because the [box] isn't small.\",\n", + " \"the toy can be put into the bag because the [bag] isn't small.\",\n", + " \"the toy can be put into the box because the [box] isn't small.\"],\n", + " ['the ball can fit into the bag because the [ball] is small.',\n", + " 'the ball can fit into the box because the [ball] is small.',\n", + " 'the toy can fit into the bag because the [toy] is small.',\n", + " 'the toy can fit into the box because the [toy] is small.',\n", + " 'the ball can be put into the bag because the [ball] is small.',\n", + " 'the ball can be put into the box because the [ball] is small.',\n", + " 'the toy can be put into the bag because the [toy] is small.',\n", + " 'the toy can be put into the box because the [toy] is small.'],\n", + " ['the ball can fit into the bag because the [bag] is large.',\n", + " 'the ball can fit into the box because the [box] is large.',\n", + " 'the toy can fit into the bag because the [bag] is large.',\n", + " 'the toy can fit into the box because the [box] is large.',\n", + " 'the ball can be put into the bag because the [bag] is large.',\n", + " 'the ball can be put into the box because the [box] is large.',\n", + " 'the toy can be put into the bag because the [bag] is large.',\n", + " 'the toy can be put into the box because the [box] is large.'],\n", + " [\"the bag can hold the ball because the [ball] isn't large.\",\n", + " \"the box can hold the ball because the [ball] isn't large.\",\n", + " \"the bag can hold the toy because the [toy] isn't large.\",\n", + " \"the box can hold the toy because the [toy] isn't large.\",\n", + " \"the bag has enough room for the ball because the [ball] isn't large.\",\n", + " \"the box has enough room for the ball because the [ball] isn't large.\",\n", + " \"the bag has enough room for the toy because the [toy] isn't large.\",\n", + " \"the box has enough room for the toy because the [toy] isn't large.\"],\n", + " [\"the bag can hold the ball because the [bag] isn't small.\",\n", + " \"the box can hold the ball because the [box] isn't small.\",\n", + " \"the bag can hold the toy because the [bag] isn't small.\",\n", + " \"the box can hold the toy because the [box] isn't small.\",\n", + " \"the bag has enough room for the ball because the [bag] isn't small.\",\n", + " \"the box has enough room for the ball because the [box] isn't small.\",\n", + " \"the bag has enough room for the toy because the [bag] isn't small.\",\n", + " \"the box has enough room for the toy because the [box] isn't small.\"],\n", + " ['the bag can hold the ball because the [ball] is small.',\n", + " 'the box can hold the ball because the [ball] is small.',\n", + " 'the bag can hold the toy because the [toy] is small.',\n", + " 'the box can hold the toy because the [toy] is small.',\n", + " 'the bag has enough room for the ball because the [ball] is small.',\n", + " 'the box has enough room for the ball because the [ball] is small.',\n", + " 'the bag has enough room for the toy because the [toy] is small.',\n", + " 'the box has enough room for the toy because the [toy] is small.'],\n", + " ['the bag can hold the ball because the [bag] is large.',\n", + " 'the box can hold the ball because the [box] is large.',\n", + " 'the bag can hold the toy because the [bag] is large.',\n", + " 'the box can hold the toy because the [box] is large.',\n", + " 'the bag has enough room for the ball because the [bag] is 
large.',\n", + " 'the box has enough room for the ball because the [box] is large.',\n", + " 'the bag has enough room for the toy because the [bag] is large.',\n", + " 'the box has enough room for the toy because the [box] is large.'],\n", + " [\"the ball doesn't fit into the bag although the [ball] isn't large.\",\n", + " \"the ball doesn't fit into the box although the [ball] isn't large.\",\n", + " \"the toy doesn't fit into the bag although the [toy] isn't large.\",\n", + " \"the toy doesn't fit into the box although the [toy] isn't large.\",\n", + " \"the ball can't be put into the bag although the [ball] isn't large.\",\n", + " \"the ball can't be put into the box although the [ball] isn't large.\",\n", + " \"the toy can't be put into the bag although the [toy] isn't large.\",\n", + " \"the toy can't be put into the box although the [toy] isn't large.\"],\n", + " [\"the ball doesn't fit into the bag although the [bag] isn't small.\",\n", + " \"the ball doesn't fit into the box although the [box] isn't small.\",\n", + " \"the toy doesn't fit into the bag although the [bag] isn't small.\",\n", + " \"the toy doesn't fit into the box although the [box] isn't small.\",\n", + " \"the ball can't be put into the bag although the [bag] isn't small.\",\n", + " \"the ball can't be put into the box although the [box] isn't small.\",\n", + " \"the toy can't be put into the bag although the [bag] isn't small.\",\n", + " \"the toy can't be put into the box although the [box] isn't small.\"],\n", + " [\"the ball doesn't fit into the bag although the [ball] is small.\",\n", + " \"the ball doesn't fit into the box although the [ball] is small.\",\n", + " \"the toy doesn't fit into the bag although the [toy] is small.\",\n", + " \"the toy doesn't fit into the box although the [toy] is small.\",\n", + " \"the ball can't be put into the bag although the [ball] is small.\",\n", + " \"the ball can't be put into the box although the [ball] is small.\",\n", + " \"the toy can't be put into the bag although the [toy] is small.\",\n", + " \"the toy can't be put into the box although the [toy] is small.\"],\n", + " [\"the ball doesn't fit into the bag although the [bag] is large.\",\n", + " \"the ball doesn't fit into the box although the [box] is large.\",\n", + " \"the toy doesn't fit into the bag although the [bag] is large.\",\n", + " \"the toy doesn't fit into the box although the [box] is large.\",\n", + " \"the ball can't be put into the bag although the [bag] is large.\",\n", + " \"the ball can't be put into the box although the [box] is large.\",\n", + " \"the toy can't be put into the bag although the [bag] is large.\",\n", + " \"the toy can't be put into the box although the [box] is large.\"],\n", + " [\"the bag doesn't hold the ball although the [ball] isn't large.\",\n", + " \"the box doesn't hold the ball although the [ball] isn't large.\",\n", + " \"the bag doesn't hold the toy although the [toy] isn't large.\",\n", + " \"the box doesn't hold the toy although the [toy] isn't large.\",\n", + " \"the bag doesn't have enough room for the ball although the [ball] isn't large.\",\n", + " \"the box doesn't have enough room for the ball although the [ball] isn't large.\",\n", + " \"the bag doesn't have enough room for the toy although the [toy] isn't large.\",\n", + " \"the box doesn't have enough room for the toy although the [toy] isn't large.\"],\n", + " [\"the bag doesn't hold the ball although the [bag] isn't small.\",\n", + " \"the box doesn't hold the ball although the [box] isn't small.\",\n", + 
" \"the bag doesn't hold the toy although the [bag] isn't small.\",\n", + " \"the box doesn't hold the toy although the [box] isn't small.\",\n", + " \"the bag doesn't have enough room for the ball although the [bag] isn't small.\",\n", + " \"the box doesn't have enough room for the ball although the [box] isn't small.\",\n", + " \"the bag doesn't have enough room for the toy although the [bag] isn't small.\",\n", + " \"the box doesn't have enough room for the toy although the [box] isn't small.\"],\n", + " [\"the bag doesn't hold the ball although the [ball] is small.\",\n", + " \"the box doesn't hold the ball although the [ball] is small.\",\n", + " \"the bag doesn't hold the toy although the [toy] is small.\",\n", + " \"the box doesn't hold the toy although the [toy] is small.\",\n", + " \"the bag doesn't have enough room for the ball although the [ball] is small.\",\n", + " \"the box doesn't have enough room for the ball although the [ball] is small.\",\n", + " \"the bag doesn't have enough room for the toy although the [toy] is small.\",\n", + " \"the box doesn't have enough room for the toy although the [toy] is small.\"],\n", + " [\"the bag doesn't hold the ball although the [bag] is large.\",\n", + " \"the box doesn't hold the ball although the [box] is large.\",\n", + " \"the bag doesn't hold the toy although the [bag] is large.\",\n", + " \"the box doesn't hold the toy although the [box] is large.\",\n", + " \"the bag doesn't have enough room for the ball although the [bag] is large.\",\n", + " \"the box doesn't have enough room for the ball although the [box] is large.\",\n", + " \"the bag doesn't have enough room for the toy although the [bag] is large.\",\n", + " \"the box doesn't have enough room for the toy although the [box] is large.\"],\n", + " ['the ball can fit into the bag although the [ball] is large.',\n", + " 'the ball can fit into the box although the [ball] is large.',\n", + " 'the toy can fit into the bag although the [toy] is large.',\n", + " 'the toy can fit into the box although the [toy] is large.',\n", + " 'the ball can be put into the bag although the [ball] is large.',\n", + " 'the ball can be put into the box although the [ball] is large.',\n", + " 'the toy can be put into the bag although the [toy] is large.',\n", + " 'the toy can be put into the box although the [toy] is large.'],\n", + " ['the ball can fit into the bag although the [bag] is small.',\n", + " 'the ball can fit into the box although the [box] is small.',\n", + " 'the toy can fit into the bag although the [bag] is small.',\n", + " 'the toy can fit into the box although the [box] is small.',\n", + " 'the ball can be put into the bag although the [bag] is small.',\n", + " 'the ball can be put into the box although the [box] is small.',\n", + " 'the toy can be put into the bag although the [bag] is small.',\n", + " 'the toy can be put into the box although the [box] is small.'],\n", + " [\"the ball can fit into the bag although the [ball] isn't small.\",\n", + " \"the ball can fit into the box although the [ball] isn't small.\",\n", + " \"the toy can fit into the bag although the [toy] isn't small.\",\n", + " \"the toy can fit into the box although the [toy] isn't small.\",\n", + " \"the ball can be put into the bag although the [ball] isn't small.\",\n", + " \"the ball can be put into the box although the [ball] isn't small.\",\n", + " \"the toy can be put into the bag although the [toy] isn't small.\",\n", + " \"the toy can be put into the box although the [toy] isn't small.\"],\n", + " 
[\"the ball can fit into the bag although the [bag] isn't large.\",\n", + " \"the ball can fit into the box although the [box] isn't large.\",\n", + " \"the toy can fit into the bag although the [bag] isn't large.\",\n", + " \"the toy can fit into the box although the [box] isn't large.\",\n", + " \"the ball can be put into the bag although the [bag] isn't large.\",\n", + " \"the ball can be put into the box although the [box] isn't large.\",\n", + " \"the toy can be put into the bag although the [bag] isn't large.\",\n", + " \"the toy can be put into the box although the [box] isn't large.\"],\n", + " ['the bag can hold the ball although the [ball] is large.',\n", + " 'the box can hold the ball although the [ball] is large.',\n", + " 'the bag can hold the toy although the [toy] is large.',\n", + " 'the box can hold the toy although the [toy] is large.',\n", + " 'the bag has enough room for the ball although the [ball] is large.',\n", + " 'the box has enough room for the ball although the [ball] is large.',\n", + " 'the bag has enough room for the toy although the [toy] is large.',\n", + " 'the box has enough room for the toy although the [toy] is large.'],\n", + " ['the bag can hold the ball although the [bag] is small.',\n", + " 'the box can hold the ball although the [box] is small.',\n", + " 'the bag can hold the toy although the [bag] is small.',\n", + " 'the box can hold the toy although the [box] is small.',\n", + " 'the bag has enough room for the ball although the [bag] is small.',\n", + " 'the box has enough room for the ball although the [box] is small.',\n", + " 'the bag has enough room for the toy although the [bag] is small.',\n", + " 'the box has enough room for the toy although the [box] is small.'],\n", + " [\"the bag can hold the ball although the [ball] isn't small.\",\n", + " \"the box can hold the ball although the [ball] isn't small.\",\n", + " \"the bag can hold the toy although the [toy] isn't small.\",\n", + " \"the box can hold the toy although the [toy] isn't small.\",\n", + " \"the bag has enough room for the ball although the [ball] isn't small.\",\n", + " \"the box has enough room for the ball although the [ball] isn't small.\",\n", + " \"the bag has enough room for the toy although the [toy] isn't small.\",\n", + " \"the box has enough room for the toy although the [toy] isn't small.\"],\n", + " [\"the bag can hold the ball although the [bag] isn't large.\",\n", + " \"the box can hold the ball although the [box] isn't large.\",\n", + " \"the bag can hold the toy although the [bag] isn't large.\",\n", + " \"the box can hold the toy although the [box] isn't large.\",\n", + " \"the bag has enough room for the ball although the [bag] isn't large.\",\n", + " \"the box has enough room for the ball although the [box] isn't large.\",\n", + " \"the bag has enough room for the toy although the [bag] isn't large.\",\n", + " \"the box has enough room for the toy although the [box] isn't large.\"]]" + ] + }, + "execution_count": 407, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "causal_sentences\n", + "turning_sentences\n", + "len(substituted_sent_groups)\n", + "substituted_sent_groups" + ] + }, + { + "cell_type": "code", + "execution_count": 275, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "index: 190\n", + "\n", + "\n", + "1-1, The newspapers could be placed on all the chairs, because there were many of the newspapers. 
The newspapers could be placed on all the chairs, because there were few of the chairs. The newspapers could be placed on all the chairs, because there were not few of the newspapers. The newspapers could be placed on all the chairs, because there were not many of the chairs. The chairs could all be covered by the newspapers, because there were many of the newspapers. The chairs could all be covered by the newspapers, because there were few of the chairs. The chairs could all be covered by the newspapers, because there were not few of the newspapers. The chairs could all be covered by the newspapers, because there were not many of the chairs. The newspapers couldn't be placed on all the chairs, because there were not many of the newspapers. The newspapers couldn't be placed on all the chairs, because there were not few of the chairs. The newspapers couldn't be placed on all the chairs, because there were few of the newspapers. The newspapers couldn't be placed on all the chairs, because there were many of the chairs. The chairs couldn't all be covered by the newspapers, because there were not many of the newspapers. The chairs couldn't all be covered by the newspapers, because there were not few of the chairs. The chairs couldn't all be covered by the newspapers, because there were few of the newspapers. The chairs couldn't all be covered by the newspapers, because there were many of the chairs. \n", + "\n", + "\n", + "2-1, The newspapers could be placed on all the chairs, although there were not many of the newspapers. The newspapers could be placed on all the chairs, although there were not few of the chairs. The newspapers could be placed on all the chairs, although there were few of the newspapers. The newspapers could be placed on all the chairs, although there were many of the chairs. The chairs could all be covered by the newspapers, although there were not many of the newspapers. The chairs could all be covered by the newspapers, although there were not few of the chairs. The chairs could all be covered by the newspapers, although there were few of the newspapers. The chairs could all be covered by the newspapers, although there were many of the chairs. The newspapers couldn't be placed on all the chairs, although there were many of the newspapers. The newspapers couldn't be placed on all the chairs, although there were few of the chairs. The newspapers couldn't be placed on all the chairs, although there were not few of the newspapers. The newspapers couldn't be placed on all the chairs, although there were not many of the chairs. The chairs couldn't all be covered by the newspapers, although there were many of the newspapers. The chairs couldn't all be covered by the newspapers, although there were few of the chairs. The chairs couldn't all be covered by the newspapers, although there were not few of the newspapers. The chairs couldn't all be covered by the newspapers, although there were not many of the chairs. \n", + "\n", + "\n", + "******************************************\n", + "\n", + "\n", + "3-1, The chairs could all be covered by the newspapers, because there were many of the newspapers. The tables could all be covered by the newspapers, because there were many of the newspapers. The benches could all be covered by the newspapers, because there were many of the newspapers. The chairs could all be covered by the cups, because there were many of the cups. The tables could all be covered by the cups, because there were many of the cups. 
The benches could all be covered by the cups, because there were many of the cups. The chairs could all be covered by the pictures, because there were many of the pictures. The tables could all be covered by the pictures, because there were many of the pictures. The benches could all be covered by the pictures, because there were many of the pictures. The chairs couldn't all be covered by the newspapers, because there were not many of the newspapers. The tables couldn't all be covered by the newspapers, because there were not many of the newspapers. The benches couldn't all be covered by the newspapers, because there were not many of the newspapers. The chairs couldn't all be covered by the cups, because there were not many of the cups. The tables couldn't all be covered by the cups, because there were not many of the cups. The benches couldn't all be covered by the cups, because there were not many of the cups. The chairs couldn't all be covered by the pictures, because there were not many of the pictures. The tables couldn't all be covered by the pictures, because there were not many of the pictures. The benches couldn't all be covered by the pictures, because there were not many of the pictures. The chairs couldn't carry all the newspapers, although there were not few of the newspapers. The tables couldn't carry all the newspapers, although there were not few of the newspapers. The benches couldn't carry all the newspapers, although there were not few of the newspapers. The chairs couldn't carry all the cups, although there were not few of the cups. The tables couldn't carry all the cups, although there were not few of the cups. The benches couldn't carry all the cups, although there were not few of the cups. The chairs couldn't carry all the pictures, although there were not few of the pictures. The tables couldn't carry all the pictures, although there were not few of the pictures. The benches couldn't carry all the pictures, although there were not few of the pictures. The newspapers could be placed on all the chairs, although there were not many of the newspapers. The newspapers could be placed on all the tables, although there were not many of the newspapers. The newspapers could be placed on all the benches, although there were not many of the newspapers. The cups could be placed on all the chairs, although there were not many of the cups. The cups could be placed on all the tables, although there were not many of the cups. The cups could be placed on all the benches, although there were not many of the cups. The pictures could be placed on all the chairs, although there were not many of the pictures. The pictures could be placed on all the tables, although there were not many of the pictures. The pictures could be placed on all the benches, although there were not many of the pictures. The newspapers couldn't be put on all the chairs, because there were not many of the newspapers. The newspapers couldn't be put on all the tables, because there were not many of the newspapers. The newspapers couldn't be put on all the benches, because there were not many of the newspapers. The cups couldn't be put on all the chairs, because there were not many of the cups. The cups couldn't be put on all the tables, because there were not many of the cups. The cups couldn't be put on all the benches, because there were not many of the cups. The pictures couldn't be put on all the chairs, because there were not many of the pictures. 
The pictures couldn't be put on all the tables, because there were not many of the pictures. The pictures couldn't be put on all the benches, because there were not many of the pictures. The newspapers could be put on all the chairs, although there were many of the chairs. The newspapers could be put on all the tables, although there were many of the tables. The newspapers could be put on all the benches, although there were many of the benches. The cups could be put on all the chairs, although there were many of the chairs. The cups could be put on all the tables, although there were many of the tables. \n", + "\n", + "\n", + "3-2, The cups could be put on all the benches, although there were many of the benches. The pictures could be put on all the chairs, although there were many of the chairs. The pictures could be put on all the tables, although there were many of the tables. The pictures could be put on all the benches, although there were many of the benches. The chairs couldn't all be covered by the newspapers, because there were many of the chairs. The tables couldn't all be covered by the newspapers, because there were many of the tables. The benches couldn't all be covered by the newspapers, because there were many of the benches. The chairs couldn't all be covered by the cups, because there were many of the chairs. The tables couldn't all be covered by the cups, because there were many of the tables. The benches couldn't all be covered by the cups, because there were many of the benches. The chairs couldn't all be covered by the pictures, because there were many of the chairs. The tables couldn't all be covered by the pictures, because there were many of the tables. The benches couldn't all be covered by the pictures, because there were many of the benches. The newspapers could be placed on all the chairs, although there were many of the chairs. The newspapers could be placed on all the tables, although there were many of the tables. The newspapers could be placed on all the benches, although there were many of the benches. The cups could be placed on all the chairs, although there were many of the chairs. The cups could be placed on all the tables, although there were many of the tables. The cups could be placed on all the benches, although there were many of the benches. The pictures could be placed on all the chairs, although there were many of the chairs. The pictures could be placed on all the tables, although there were many of the tables. The pictures could be placed on all the benches, although there were many of the benches. The chairs couldn't all be covered by the newspapers, although there were many of the newspapers. The tables couldn't all be covered by the newspapers, although there were many of the newspapers. The benches couldn't all be covered by the newspapers, although there were many of the newspapers. The chairs couldn't all be covered by the cups, although there were many of the cups. The tables couldn't all be covered by the cups, although there were many of the cups. The benches couldn't all be covered by the cups, although there were many of the cups. The chairs couldn't all be covered by the pictures, although there were many of the pictures. The tables couldn't all be covered by the pictures, although there were many of the pictures. The benches couldn't all be covered by the pictures, although there were many of the pictures. The newspapers could be put on all the chairs, although there were not few of the chairs. 
The newspapers could be put on all the tables, although there were not few of the tables. The newspapers could be put on all the benches, although there were not few of the benches. The cups could be put on all the chairs, although there were not few of the chairs. The cups could be put on all the tables, although there were not few of the tables. The cups could be put on all the benches, although there were not few of the benches. The pictures could be put on all the chairs, although there were not few of the chairs. The pictures could be put on all the tables, although there were not few of the tables. The pictures could be put on all the benches, although there were not few of the benches. The newspapers couldn't be put on all the chairs, because there were many of the chairs. The newspapers couldn't be put on all the tables, because there were many of the tables. The newspapers couldn't be put on all the benches, because there were many of the benches. The cups couldn't be put on all the chairs, because there were many of the chairs. The cups couldn't be put on all the tables, because there were many of the tables. The cups couldn't be put on all the benches, because there were many of the benches. The pictures couldn't be put on all the chairs, because there were many of the chairs. The pictures couldn't be put on all the tables, because there were many of the tables. The pictures couldn't be put on all the benches, because there were many of the benches. The chairs couldn't carry all the newspapers, because there were many of the chairs. \n", + "\n", + "\n", + "******************************************\n", + "\n", + "\n", + "3-3, The tables couldn't carry all the newspapers, because there were many of the tables. The benches couldn't carry all the newspapers, because there were many of the benches. The chairs couldn't carry all the cups, because there were many of the chairs. The tables couldn't carry all the cups, because there were many of the tables. The benches couldn't carry all the cups, because there were many of the benches. The chairs couldn't carry all the pictures, because there were many of the chairs. The tables couldn't carry all the pictures, because there were many of the tables. The benches couldn't carry all the pictures, because there were many of the benches. The newspapers could be put on all the chairs, because there were few of the chairs. The newspapers could be put on all the tables, because there were few of the tables. The newspapers could be put on all the benches, because there were few of the benches. The cups could be put on all the chairs, because there were few of the chairs. The cups could be put on all the tables, because there were few of the tables. The cups could be put on all the benches, because there were few of the benches. The pictures could be put on all the chairs, because there were few of the chairs. The pictures could be put on all the tables, because there were few of the tables. The pictures could be put on all the benches, because there were few of the benches. The newspapers couldn't be placed on all the chairs, although there were few of the chairs. The newspapers couldn't be placed on all the tables, although there were few of the tables. The newspapers couldn't be placed on all the benches, although there were few of the benches. The cups couldn't be placed on all the chairs, although there were few of the chairs. The cups couldn't be placed on all the tables, although there were few of the tables. 
The cups couldn't be placed on all the benches, although there were few of the benches. The pictures couldn't be placed on all the chairs, although there were few of the chairs. The pictures couldn't be placed on all the tables, although there were few of the tables. The pictures couldn't be placed on all the benches, although there were few of the benches. The newspapers could be placed on all the chairs, because there were many of the newspapers. The newspapers could be placed on all the tables, because there were many of the newspapers. The newspapers could be placed on all the benches, because there were many of the newspapers. The cups could be placed on all the chairs, because there were many of the cups. The cups could be placed on all the tables, because there were many of the cups. The cups could be placed on all the benches, because there were many of the cups. The pictures could be placed on all the chairs, because there were many of the pictures. The pictures could be placed on all the tables, because there were many of the pictures. The pictures could be placed on all the benches, because there were many of the pictures. The chairs couldn't carry all the newspapers, because there were few of the newspapers. The tables couldn't carry all the newspapers, because there were few of the newspapers. The benches couldn't carry all the newspapers, because there were few of the newspapers. The chairs couldn't carry all the cups, because there were few of the cups. The tables couldn't carry all the cups, because there were few of the cups. The benches couldn't carry all the cups, because there were few of the cups. The chairs couldn't carry all the pictures, because there were few of the pictures. The tables couldn't carry all the pictures, because there were few of the pictures. The benches couldn't carry all the pictures, because there were few of the pictures. The chairs could all be covered by the newspapers, because there were not many of the chairs. The tables could all be covered by the newspapers, because there were not many of the tables. The benches could all be covered by the newspapers, because there were not many of the benches. The chairs could all be covered by the cups, because there were not many of the chairs. The tables could all be covered by the cups, because there were not many of the tables. The benches could all be covered by the cups, because there were not many of the benches. 
\n" + ] + } + ], + "source": [ + "num = next(i for i in range(len(frames)) if frames[i][\"index\"] == 190)\n", + "\n", + "print(\"index:\", frames[num][\"index\"])\n", + "print(\"\\n\")\n", + "\n", + "\n", + "def add_sentence(article, sentence):\n", + " if sentence[:3] == \"the\":\n", + " sentence = sentence.replace(\"the\", \"The\", 1)\n", + " sentence = sentence.replace(\"[\", \"\")\n", + " sentence = sentence.replace(\"]\", \"\")\n", + " sentence = sentence.replace(\" <\", \", \")\n", + " sentence = sentence.replace(\">\", \"\")\n", + " if article.count(\".\") < 50:\n", + " article += \"{} \".format(sentence)\n", + " return True, article\n", + " return False, article\n", + "\n", + "\n", + "article_c = \"\"\n", + "article_t = \"\"\n", + "\n", + "article_s1 = \"\"\n", + "article_s2 = \"\"\n", + "article_s3 = \"\"\n", + "\n", + "c, t, s = make_sentences(A_template, B_template, causal_templates, turning_templates, **frames[num])\n", + "for j in c:\n", + " _, article_c = add_sentence(article_c, j)\n", + "for j in t:\n", + " _, article_t = add_sentence(article_t, j)\n", + "for j in s:\n", + " success, article_s1 = add_sentence(article_s1, j)\n", + " if not success:\n", + " success, article_s2 = add_sentence(article_s2, j)\n", + " if not success:\n", + " success, article_s3 = add_sentence(article_s3, j)\n", + "\n", + " \n", + "\n", + "print(\"1-1,\", article_c)\n", + "print(\"\\n\")\n", + "print(\"2-1,\", article_t)\n", + "print(\"\\n\")\n", + "print(\"******************************************\")\n", + "print(\"\\n\")\n", + "print(\"3-1,\", article_s1) \n", + "print(\"\\n\")\n", + "print(\"3-2,\", article_s2)\n", + "print(\"\\n\")\n", + "print(\"******************************************\")\n", + "print(\"\\n\")\n", + "print(\"3-3,\", article_s3) \n" + ] + }, + { + "cell_type": "code", + "execution_count": 267, + "metadata": {}, + "outputs": [], + "source": [ + "error_index: {\n", + " \"replcae_error\": {\n", + " \"index\": [12, 22, 38, 188, 226],\n", + " \"example\": [\n", + " \"John didn't defeated Sue in the running race although Sue had a bad start.\",\n", + " \"The cakes don't are not as popular as the apples so the cakes shouldn't be made less next time.\"\n", + " ],\n", + " \"error\": [\n", + " \"didn't defeated\",\n", + " \"don't are not\"\n", + " ]\n", + " },\n", + " \"add ',' before 'but' and 'so'\": {}\n", + "}\n", + "TODO: {\"index\":[190, \"few of -> lack of\"]} " + ] + }, + { + "cell_type": "code", + "execution_count": 206, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 206, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "s = \"[mask]\"\n", + "s.endswith(\"]\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/WSC_associative_label.json b/WSC_associative_label.json new file mode 100644 index 00000000000000..4fd2fd1015ae5d --- /dev/null +++ b/WSC_associative_label.json @@ -0,0 +1 @@ +[{"index": 0, "sentence": "The city councilmen refused the demonstrators a permit because [they] feared violence.", "answer1": "The demonstrators", "answer0": "The city councilmen", "is_associative": 0, "correct_answer": "The 
city councilmen"}, {"index": 1, "sentence": "The city councilmen refused the demonstrators a permit because [they] advocated violence.", "answer1": "The demonstrators", "answer0": "The city councilmen", "is_associative": 0, "correct_answer": "The demonstrators"}, {"index": 2, "sentence": "The trophy doesn't fit into the brown suitcase because [it] is too large.", "answer1": "the suitcase", "answer0": "the trophy", "is_associative": 0, "correct_answer": "the trophy"}, {"index": 3, "sentence": "The trophy doesn't fit into the brown suitcase because [it] is too small.", "answer1": "the suitcase", "answer0": "the trophy", "is_associative": 0, "correct_answer": "the suitcase"}, {"index": 4, "sentence": "Joan made sure to thank Susan for all the help [she] had recieved.", "answer1": "Susan", "answer0": "Joan", "is_associative": 0, "correct_answer": "Joan"}, {"index": 5, "sentence": "Joan made sure to thank Susan for all the help [she] had given.", "answer1": "Susan", "answer0": "Joan", "is_associative": 0, "correct_answer": "Susan"}, {"index": 6, "sentence": "Paul tried to call George on the phone, but [he] wasn't successful.", "answer1": "George", "answer0": "Paul", "is_associative": 0, "correct_answer": "Paul"}, {"index": 7, "sentence": "Paul tried to call George on the phone, but [he] wasn't available.", "answer1": "George", "answer0": "Paul", "is_associative": 0, "correct_answer": "George"}, {"index": 8, "sentence": "The lawyer asked the witness a question, but [he] was reluctant to repeat it.", "answer1": "the witness", "answer0": "the lawyer", "is_associative": 0, "correct_answer": "the lawyer"}, {"index": 9, "sentence": "The lawyer asked the witness a question, but [he] was reluctant to answer it.", "answer1": "the witness", "answer0": "the lawyer", "is_associative": 0, "correct_answer": "the witness"}, {"index": 10, "sentence": "The delivery truck zoomed by the school bus because [it] was going so fast.", "answer1": "the school bus", "answer0": "the delivery truck", "is_associative": 0, "correct_answer": "the delivery truck"}, {"index": 11, "sentence": "The delivery truck zoomed by the school bus because [it] was going so slow.", "answer1": "the school bus", "answer0": "the delivery truck", "is_associative": 0, "correct_answer": "the school bus"}, {"index": 12, "sentence": "Frank felt vindicated when his longtime rival Bill revealed that [he] was the winner of the competition.", "answer1": "Bill", "answer0": "Frank", "is_associative": 0, "correct_answer": "Frank"}, {"index": 13, "sentence": "Frank felt crushed when his longtime rival Bill revealed that [he] was the winner of the competition.", "answer1": "Bill", "answer0": "Frank", "is_associative": 0, "correct_answer": "Bill"}, {"index": 14, "sentence": "The man couldn't lift his son because [he] was so weak.", "answer1": "The son", "answer0": "The man", "is_associative": 0, "correct_answer": "The man"}, {"index": 15, "sentence": "The man couldn't lift his son because [he] was so heavy.", "answer1": "The son", "answer0": "The man", "is_associative": 0, "correct_answer": "The son"}, {"index": 16, "sentence": "The large ball crashed right through the table because [it] was made of steel.", "answer1": "The table", "answer0": "The large ball", "is_associative": 0, "correct_answer": "The large ball"}, {"index": 17, "sentence": "The large ball crashed right through the table because [it] was made of styrofoam.", "answer1": "The table", "answer0": "The large ball", "is_associative": 0, "correct_answer": "The table"}, {"index": 18, "sentence": 
"John couldn't see the stage with Billy in front of him because [he] is so short.", "answer1": "Billy", "answer0": "John", "is_associative": 0, "correct_answer": "John"}, {"index": 19, "sentence": "John couldn't see the stage with Billy in front of him because [he] is so tall.", "answer1": "Billy", "answer0": "John", "is_associative": 0, "correct_answer": "Billy"}, {"index": 20, "sentence": "Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.", "answer1": "Ray", "answer0": "Tom", "is_associative": 0, "correct_answer": "Tom"}, {"index": 21, "sentence": "Tom threw his schoolbag down to Ray after [he] reached the bottom of the stairs.", "answer1": "Ray", "answer0": "Tom", "is_associative": 0, "correct_answer": "Ray"}, {"index": 22, "sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.", "answer1": "Sally", "answer0": "Sue", "is_associative": 0, "correct_answer": "Sue"}, {"index": 23, "sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a bad start.", "answer1": "Sally", "answer0": "Sue", "is_associative": 0, "correct_answer": "Sally"}, {"index": 24, "sentence": "The sculpture rolled off the shelf because [it] wasn't anchored.", "answer1": "The shelf", "answer0": "The sculpture", "is_associative": 0, "correct_answer": "The sculpture"}, {"index": 25, "sentence": "The sculpture rolled off the shelf because [it] wasn't level.", "answer1": "The shelf", "answer0": "The sculpture", "is_associative": 0, "correct_answer": "The shelf"}, {"index": 26, "sentence": "Sam's drawing was hung just above Tina's and [it] did look much better with another one below it.", "answer1": "Tina's drawing", "answer0": "Sam's drawing", "is_associative": 0, "correct_answer": "Sam's drawing"}, {"index": 27, "sentence": "Sam's drawing was hung just above Tina's and [it] did look much better with another one above it.", "answer1": "Tina's drawing", "answer0": "Sam's drawing", "is_associative": 0, "correct_answer": "Tina's drawing"}, {"index": 28, "sentence": "Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.", "answer1": "Lucy", "answer0": "Anna", "is_associative": 0, "correct_answer": "Anna"}, {"index": 29, "sentence": "Anna did a lot worse than her good friend Lucy on the test because [she] had studied so hard.", "answer1": "Lucy", "answer0": "Anna", "is_associative": 0, "correct_answer": "Lucy"}, {"index": 30, "sentence": "The firemen arrived after the police because [they] were coming from so far away.", "answer1": "The police", "answer0": "The firemen", "is_associative": 0, "correct_answer": "The firemen"}, {"index": 31, "sentence": "The firemen arrived before the police because [they] were coming from so far away.", "answer1": "The police", "answer0": "The firemen", "is_associative": 0, "correct_answer": "The police"}, {"index": 32, "sentence": "Frank was upset with Tom because the toaster [he] had bought from him didn't work.", "answer1": "Tom", "answer0": "Frank", "is_associative": 0, "correct_answer": "Frank"}, {"index": 33, "sentence": "Frank was upset with Tom because the toaster [he] had sold him didn't work.", "answer1": "Tom", "answer0": "Frank", "is_associative": 0, "correct_answer": "Tom"}, {"index": 34, "sentence": "Jim yelled at Kevin because [he] was so upset.", "answer1": "Kevin", "answer0": "Jim", "is_associative": 0, "correct_answer": "Jim"}, {"index": 35, "sentence": "Jim comforted Kevin because [he] was so upset.", "answer1": "Kevin", 
"answer0": "Jim", "is_associative": 0, "correct_answer": "Kevin"}, {"index": 36, "sentence": "The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first.", "answer1": "The bag of flour", "answer0": "The sack of potatoes", "is_associative": 0, "correct_answer": "The sack of potatoes"}, {"index": 37, "sentence": "The sack of potatoes had been placed below the bag of flour, so [it] had to be moved first.", "answer1": "The bag of flour", "answer0": "The sack of potatoes", "is_associative": 0, "correct_answer": "The bag of flour"}, {"index": 38, "sentence": "Pete envies Martin although [he] is very successful.", "answer1": "Martin", "answer0": "Pete", "is_associative": 0, "correct_answer": "Pete"}, {"index": 39, "sentence": "Pete envies Martin because [he] is very successful.", "answer1": "Martin", "answer0": "Pete", "is_associative": 0, "correct_answer": "Martin"}, {"index": 40, "sentence": "The older students were bullying the younger ones, so we punished [them] .", "answer1": "The younger students", "answer0": "The older students", "is_associative": 0, "correct_answer": "The older students"}, {"index": 41, "sentence": "The older students were bullying the younger ones, so we rescued [them] .", "answer1": "The younger students", "answer0": "The older students", "is_associative": 0, "correct_answer": "The younger students"}, {"index": 42, "sentence": "I poured water from the bottle into the cup until [it] was empty.", "answer1": "the cup", "answer0": "the bottle", "is_associative": 0, "correct_answer": "the bottle"}, {"index": 43, "sentence": "I poured water from the bottle into the cup until [it] was full.", "answer1": "the cup", "answer0": "the bottle", "is_associative": 0, "correct_answer": "the cup"}, {"index": 44, "sentence": "Susan knows all about Ann's personal problems because [she] is nosy.", "answer1": "Ann", "answer0": "Susan", "is_associative": 0, "correct_answer": "Susan"}, {"index": 45, "sentence": "Susan knows all about Ann's personal problems because [she] is indiscreet.", "answer1": "Ann", "answer0": "Susan", "is_associative": 0, "correct_answer": "Ann"}, {"index": 46, "sentence": "Sid explained his theory to Mark but [he] couldn't convince him.", "answer1": "Mark", "answer0": "Sid", "is_associative": 0, "correct_answer": "Sid"}, {"index": 47, "sentence": "Sid explained his theory to Mark but [he] couldn't understand him.", "answer1": "Mark", "answer0": "Sid", "is_associative": 0, "correct_answer": "Mark"}, {"index": 48, "sentence": "Susan knew that Ann's son had been in a car accident, so [she] told her about it.", "answer1": "Ann", "answer0": "Susan", "is_associative": 0, "correct_answer": "Susan"}, {"index": 49, "sentence": "Susan knew that Ann's son had been in a car accident, because [she] told her about it.", "answer1": "Ann", "answer0": "Susan", "is_associative": 0, "correct_answer": "Ann"}, {"index": 50, "sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.", "answer1": "Joe's uncle", "answer0": "Joe", "is_associative": 0, "correct_answer": "Joe"}, {"index": 51, "sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years older.", "answer1": "Joe's uncle", "answer0": "Joe", "is_associative": 0, "correct_answer": "Joe's uncle"}, {"index": 52, "sentence": "The painting in Mark's living room shows an oak tree. 
[It] is to the right of the bookcase.", "answer1": "The oak tree", "answer0": "The painting", "is_associative": 0, "correct_answer": "The painting"}, {"index": 56, "sentence": "The drain is clogged with hair. [It] has to be cleaned.", "answer1": "The hair", "answer0": "The drain", "is_associative": 0, "correct_answer": "The drain"}, {"index": 57, "sentence": "The drain is clogged with hair. [It] has to be removed.", "answer1": "The hair", "answer0": "The drain", "is_associative": 0, "correct_answer": "The hair"}, {"index": 59, "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. Luckily, [it] was delayed, so it worked out.", "answer1": "The train", "answer0": "The meeting", "is_associative": 0, "correct_answer": "The train"}, {"index": 60, "sentence": "There is a pillar between me and the stage, and I can't see around [it] .", "answer1": "The stage", "answer0": "The pillar", "is_associative": 0, "correct_answer": "The pillar"}, {"index": 61, "sentence": "There is a pillar between me and the stage, and I can't see [it] .", "answer1": "The stage", "answer0": "The pillar", "is_associative": 0, "correct_answer": "The stage"}, {"index": 62, "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear [it] .", "answer1": "The subway", "answer0": "The announcement", "is_associative": 0, "correct_answer": "The announcement"}, {"index": 63, "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear over [it] .", "answer1": "The subway", "answer0": "The announcement", "is_associative": 0, "correct_answer": "The subway"}, {"index": 64, "sentence": "In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.", "answer1": "The rain", "answer0": "The concert", "is_associative": 0, "correct_answer": "The concert"}, {"index": 65, "sentence": "In the middle of the outdoor concert, the rain started falling, and [it] continued until 10.", "answer1": "The rain", "answer0": "The concert", "is_associative": 0, "correct_answer": "The rain"}, {"index": 66, "sentence": "I used an old rag to clean the knife, and then I put [it] in the trash.", "answer1": "The knife", "answer0": "The rag", "is_associative": 0, "correct_answer": "The rag"}, {"index": 67, "sentence": "I used an old rag to clean the knife, and then I put [it] in the drawer.", "answer1": "The knife", "answer0": "The rag", "is_associative": 0, "correct_answer": "The knife"}, {"index": 68, "sentence": "Ann asked Mary what time the library closes, because [she] had forgotten.", "answer1": "Mary", "answer0": "Ann", "is_associative": 0, "correct_answer": "Ann"}, {"index": 69, "sentence": "Ann asked Mary what time the library closes, but [she] had forgotten.", "answer1": "Mary", "answer0": "Ann", "is_associative": 0, "correct_answer": "Mary"}, {"index": 70, "sentence": "I took the water bottle out of the backpack so that [it] would be handy.", "answer1": "The backpack", "answer0": "The water bottle", "is_associative": 0, "correct_answer": "The water bottle"}, {"index": 71, "sentence": "I took the water bottle out of the backpack so that [it] would be lighter.", "answer1": "The backpack", "answer0": "The water bottle", "is_associative": 0, "correct_answer": "The backpack"}, {"index": 73, "sentence": "I couldn't put the pot on the shelf because [it] was too high.", "answer1": "The shelf", "answer0": "The pot", "is_associative": 0, "correct_answer": "The shelf"}, {"index": 76, "sentence": "Bob 
paid for Charlie's college education. [He] is very generous.", "answer1": "Charlie", "answer0": "Bob", "is_associative": 0, "correct_answer": "Bob"}, {"index": 77, "sentence": "Bob paid for Charlie's college education. [He] is very grateful.", "answer1": "Charlie", "answer0": "Bob", "is_associative": 0, "correct_answer": "Charlie"}, {"index": 78, "sentence": "Bob paid for Charlie's college education, but now Charlie acts as though it never happened. [He] is very hurt.", "answer1": "Charlie", "answer0": "Bob", "is_associative": 0, "correct_answer": "Bob"}, {"index": 79, "sentence": "Bob paid for Charlie's college education, but now Charlie acts as though it never happened. [He] is very ungrateful.", "answer1": "Charlie", "answer0": "Bob", "is_associative": 0, "correct_answer": "Charlie"}, {"index": 80, "sentence": "Bob was playing cards with Adam and was way ahead. If Adam hadn't had a sudden run of good luck, [he] would have won.", "answer1": "Adam", "answer0": "Bob", "is_associative": 0, "correct_answer": "Bob"}, {"index": 81, "sentence": "Bob was playing cards with Adam and was way ahead. If Adam hadn't had a sudden run of good luck, [he] would have lost.", "answer1": "Adam", "answer0": "Bob", "is_associative": 0, "correct_answer": "Adam"}, {"index": 82, "sentence": "Adam can't leave work here until Bob arrives to replace him. If Bob had left home for work on time, [he] would be gone by this time.", "answer1": "Bob", "answer0": "Adam", "is_associative": 0, "correct_answer": "Adam"}, {"index": 83, "sentence": "Adam can't leave work here until Bob arrives to replace him. If Bob had left home for work on time, [he] would be here by this time.", "answer1": "Bob", "answer0": "Adam", "is_associative": 0, "correct_answer": "Bob"}, {"index": 84, "sentence": "If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.", "answer1": "Sam", "answer0": "The con artist", "is_associative": 0, "correct_answer": "The con artist"}, {"index": 85, "sentence": "If the con artist has succeeded in fooling Sam, [he] would have lost a lot of money.", "answer1": "Sam", "answer0": "The con artist", "is_associative": 0, "correct_answer": "Sam"}, {"index": 86, "sentence": "It was a summer afternoon, and the dog was sitting in the middle of the lawn. After a while, it got up and moved to a spot under the tree, because [it] was hot.", "answer1": "The spot under the tree", "answer0": "The dog", "is_associative": 0, "correct_answer": "The dog"}, {"index": 88, "sentence": "The cat was lying by the mouse hole waiting for the mouse, but [it] was too impatient.", "answer1": "The mouse", "answer0": "The cat", "is_associative": 0, "correct_answer": "The cat"}, {"index": 89, "sentence": "The cat was lying by the mouse hole waiting for the mouse, but [it] was too cautious.", "answer1": "The mouse", "answer0": "The cat", "is_associative": 0, "correct_answer": "The mouse"}, {"index": 90, "sentence": "Anne gave birth to a daughter last month. [She] is a very charming woman.", "answer1": "Anne's daughter", "answer0": "Anne", "is_associative": 0, "correct_answer": "Anne"}, {"index": 91, "sentence": "Anne gave birth to a daughter last month. 
[She] is a very charming baby.", "answer1": "Anne's daughter", "answer0": "Anne", "is_associative": 0, "correct_answer": "Anne's daughter"}, {"index": 92, "sentence": "Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.", "answer1": "Alice's daughter", "answer0": "Alice", "is_associative": 0, "correct_answer": "Alice"}, {"index": 93, "sentence": "Alice tried frantically to stop her daughter from barking at the party, leaving us to wonder why [she] was behaving so strangely.", "answer1": "Alice's daughter", "answer0": "Alice", "is_associative": 0, "correct_answer": "Alice's daughter"}, {"index": 94, "sentence": "I saw Jim yelling at some guy in a military uniform with a huge red beard. I don't know why [he] was, but he looked very unhappy.", "answer1": "the guy in uniform", "answer0": "Jim", "is_associative": 0, "correct_answer": "Jim"}, {"index": 95, "sentence": "I saw Jim yelling at some guy in a military uniform with a huge red beard. I don't know who [he] was, but he looked very unhappy.", "answer1": "the guy in uniform", "answer0": "Jim", "is_associative": 0, "correct_answer": "the guy in uniform"}, {"index": 96, "sentence": "The fish ate the worm. [It] was hungry.", "answer1": "The worm", "answer0": "The fish", "is_associative": 0, "correct_answer": "The fish"}, {"index": 97, "sentence": "The fish ate the worm. [It] was tasty.", "answer1": "The worm", "answer0": "The fish", "is_associative": 0, "correct_answer": "The worm"}, {"index": 99, "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] out.", "answer1": "The chewing gum", "answer0": "The key", "is_associative": 0, "correct_answer": "The chewing gum"}, {"index": 100, "sentence": "The dog chased the cat, which ran up a tree. [It] waited at the bottom.", "answer1": "The cat", "answer0": "The dog", "is_associative": 0, "correct_answer": "The dog"}, {"index": 101, "sentence": "The dog chased the cat, which ran up a tree. [It] waited at the top.", "answer1": "The cat", "answer0": "The dog", "is_associative": 0, "correct_answer": "The cat"}, {"index": 102, "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get [it] removed.", "answer1": "The roof", "answer0": "The tree", "is_associative": 0, "correct_answer": "The tree"}, {"index": 104, "sentence": "The customer walked into the bank and stabbed one of the tellers. [He] was immediately taken to the police station.", "answer1": "The teller", "answer0": "The customer", "is_associative": 0, "correct_answer": "The customer"}, {"index": 105, "sentence": "The customer walked into the bank and stabbed one of the tellers. [He] was immediately taken to the hospital.", "answer1": "The teller", "answer0": "The customer", "is_associative": 0, "correct_answer": "The teller"}, {"index": 106, "sentence": "John was doing research in the library when he heard a man humming and whistling. [He] was very annoyed.", "answer1": "The man", "answer0": "John", "is_associative": 0, "correct_answer": "John"}, {"index": 107, "sentence": "John was doing research in the library when he heard a man humming and whistling. [He] was very annoying.", "answer1": "The man", "answer0": "John", "is_associative": 0, "correct_answer": "The man"}, {"index": 108, "sentence": "John was jogging through the park when he saw a man juggling watermelons. 
[He] was very impressed.", "answer1": "The juggler", "answer0": "John", "is_associative": 0, "correct_answer": "John"}, {"index": 110, "sentence": "Bob collapsed on the sidewalk. Soon he saw Carl coming to help. [He] was very ill.", "answer1": "Carl", "answer0": "Bob", "is_associative": 0, "correct_answer": "Bob"}, {"index": 111, "sentence": "Bob collapsed on the sidewalk. Soon he saw Carl coming to help. [He] was very concerned.", "answer1": "Carl", "answer0": "Bob", "is_associative": 0, "correct_answer": "Carl"}, {"index": 113, "sentence": "Sam and Amy are passionately in love, but Amy's parents are unhappy about it, because [they] are snobs.", "answer1": "Amy's parents", "answer0": "Sam and Amy", "is_associative": 0, "correct_answer": "Amy's parents"}, {"index": 114, "sentence": "Mark told Pete many lies about himself, which Pete included in his book. [He] should have been more truthful.", "answer1": "Pete", "answer0": "Mark", "is_associative": 0, "correct_answer": "Mark"}, {"index": 115, "sentence": "Mark told Pete many lies about himself, which Pete included in his book. [He] should have been more skeptical.", "answer1": "Pete", "answer0": "Mark", "is_associative": 0, "correct_answer": "Pete"}, {"index": 121, "sentence": "Mary took out her flute and played one of her favorite pieces. She has loved [it] since she was a child.", "answer1": "The piece", "answer0": "The flute", "is_associative": 0, "correct_answer": "The piece"}, {"index": 122, "sentence": "Sam pulled up a chair to the piano, but [it] was broken, so he had to stand instead.", "answer1": "The piano", "answer0": "The chair", "is_associative": 0, "correct_answer": "The chair"}, {"index": 123, "sentence": "Sam pulled up a chair to the piano, but [it] was broken, so he had to sing instead.", "answer1": "The piano", "answer0": "The chair", "is_associative": 0, "correct_answer": "The piano"}, {"index": 124, "sentence": "Since it was raining, I carried the newspaper in my backpack to keep [it] dry.", "answer1": "The backpack", "answer0": "The newspaper", "is_associative": 0, "correct_answer": "The newspaper"}, {"index": 125, "sentence": "Since it was raining, I carried the newspaper over my backpack to keep [it] dry.", "answer1": "The backpack", "answer0": "The newspaper", "is_associative": 0, "correct_answer": "The backpack"}, {"index": 126, "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. She reads [it] when she gets home from work.", "answer1": "The article", "answer0": "The book", "is_associative": 0, "correct_answer": "The book"}, {"index": 127, "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. She writes [it] when she gets home from work.", "answer1": "The article", "answer0": "The book", "is_associative": 0, "correct_answer": "The article"}, {"index": 128, "sentence": "This morning, Joey built a sand castle on the beach, and put a toy flag in the highest tower, but this afternoon the tide knocked [it] down.", "answer1": "The flag", "answer0": "The sand castle", "is_associative": 0, "correct_answer": "The sand castle"}, {"index": 129, "sentence": "This morning, Joey built a sand castle on the beach, and put a toy flag in the highest tower, but this afternoon the wind knocked [it] down.", "answer1": "The flag", "answer0": "The sand castle", "is_associative": 0, "correct_answer": "The flag"}, {"index": 130, "sentence": "Jane knocked on Susan's door, but there was no answer. 
[She] was disappointed.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Jane"}, {"index": 131, "sentence": "Jane knocked on Susan's door, but there was no answer. [She] was out.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Susan"}, {"index": 132, "sentence": "Jane knocked on the door, and Susan answered it. [She] invited her to come out.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Jane"}, {"index": 133, "sentence": "Jane knocked on the door, and Susan answered it. [She] invited her to come in.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Susan"}, {"index": 134, "sentence": "Sam took French classes from Adam, because [he] was eager to speak it fluently.", "answer1": "Adam", "answer0": "Sam", "is_associative": 0, "correct_answer": "Sam"}, {"index": 135, "sentence": "Sam took French classes from Adam, because [he] was known to speak it fluently.", "answer1": "Adam", "answer0": "Sam", "is_associative": 0, "correct_answer": "Adam"}, {"index": 139, "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, [it] was gone.", "answer1": "The cloud", "answer0": "The sun", "is_associative": 0, "correct_answer": "The cloud"}, {"index": 140, "sentence": "We went to the lake, because a shark had been seen at the ocean beach, so [it] was a safer place to swim.", "answer1": "The ocean beach", "answer0": "The lake", "is_associative": 0, "correct_answer": "The lake"}, {"index": 141, "sentence": "We went to the lake, because a shark had been seen at the ocean beach, so [it] was a dangerous place to swim.", "answer1": "The ocean beach", "answer0": "The lake", "is_associative": 0, "correct_answer": "The ocean beach"}, {"index": 142, "sentence": "Sam tried to paint a picture of shepherds with sheep, but [they] ended up looking more like golfers.", "answer1": "The sheep", "answer0": "The shepherds", "is_associative": 0, "correct_answer": "The shepherds"}, {"index": 143, "sentence": "Sam tried to paint a picture of shepherds with sheep, but [they] ended up looking more like dogs.", "answer1": "The sheep", "answer0": "The shepherds", "is_associative": 0, "correct_answer": "The sheep"}, {"index": 144, "sentence": "Mary tucked her daughter Anne into bed, so that [she] could work.", "answer1": "Mary's daughter", "answer0": "Mary", "is_associative": 0, "correct_answer": "Mary"}, {"index": 145, "sentence": "Mary tucked her daughter Anne into bed, so that [she] could sleep.", "answer1": "Mary's daughter", "answer0": "Mary", "is_associative": 0, "correct_answer": "Mary's daughter"}, {"index": 148, "sentence": "Thomson visited Cooper's grave in 1765. At that date [he] had been travelling for five years.", "answer1": "Cooper", "answer0": "Thomson", "is_associative": 0, "correct_answer": "Thomson"}, {"index": 149, "sentence": "Thomson visited Cooper's grave in 1765. 
At that date [he] had been dead for five years.", "answer1": "Cooper", "answer0": "Thomson", "is_associative": 0, "correct_answer": "Cooper"}, {"index": 150, "sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries later.", "answer1": "Arnold", "answer0": "Jackson", "is_associative": 0, "correct_answer": "Jackson"}, {"index": 151, "sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries earlier.", "answer1": "Arnold", "answer0": "Jackson", "is_associative": 0, "correct_answer": "Arnold"}, {"index": 152, "sentence": "I can't cut that tree down with that axe; [it] is too thick.", "answer1": "The axe", "answer0": "The tree", "is_associative": 0, "correct_answer": "The tree"}, {"index": 153, "sentence": "I can't cut that tree down with that axe; [it] is too small.", "answer1": "The axe", "answer0": "The tree", "is_associative": 0, "correct_answer": "The axe"}, {"index": 154, "sentence": "The foxes are getting in at night and attacking the chickens. I shall have to kill [them] .", "answer1": "The chickens", "answer0": "The foxes", "is_associative": 0, "correct_answer": "The foxes"}, {"index": 156, "sentence": "The foxes are getting in at night and attacking the chickens. [They] have gotten very bold.", "answer1": "The chickens", "answer0": "The foxes", "is_associative": 0, "correct_answer": "The foxes"}, {"index": 157, "sentence": "The foxes are getting in at night and attacking the chickens. [They] have gotten very nervous.", "answer1": "The chickens", "answer0": "The foxes", "is_associative": 0, "correct_answer": "The chickens"}, {"index": 159, "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. He lowered [them] when the wind stopped.", "answer1": "His hands", "answer0": "His eyes", "is_associative": 0, "correct_answer": "His hands"}, {"index": 160, "sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.", "answer1": "Tina", "answer0": "Terpsichore", "is_associative": 0, "correct_answer": "Terpsichore"}, {"index": 161, "sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was easier to pronounce.", "answer1": "Tina", "answer0": "Terpsichore", "is_associative": 0, "correct_answer": "Tina"}, {"index": 162, "sentence": "Fred watched TV while George went out to buy groceries. After an hour [he] got up.", "answer1": "George", "answer0": "Fred", "is_associative": 0, "correct_answer": "Fred"}, {"index": 163, "sentence": "Fred watched TV while George went out to buy groceries. After an hour [he] got back.", "answer1": "George", "answer0": "Fred", "is_associative": 0, "correct_answer": "George"}, {"index": 164, "sentence": "Fred was supposed to run the dishwasher, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned [it] on.", "answer1": "The TV", "answer0": "The dishwasher", "is_associative": 0, "correct_answer": "The dishwasher"}, {"index": 165, "sentence": "Fred was supposed to run the dishwasher, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned [it] off.", "answer1": "The TV", "answer0": "The dishwasher", "is_associative": 0, "correct_answer": "The TV"}, {"index": 166, "sentence": "Fred is the only man still alive who remembers my great-grandfather. 
[He] is a remarkable man.", "answer1": "My great-grandfather", "answer0": "Fred", "is_associative": 0, "correct_answer": "Fred"}, {"index": 167, "sentence": "Fred is the only man still alive who remembers my great-grandfather. [He] was a remarkable man.", "answer1": "My great-grandfather", "answer0": "Fred", "is_associative": 0, "correct_answer": "My great-grandfather"}, {"index": 168, "sentence": "Fred is the only man alive who still remembers my father as an infant. When Fred first saw my father, [he] was twelve years old.", "answer1": "My father", "answer0": "Fred", "is_associative": 0, "correct_answer": "Fred"}, {"index": 169, "sentence": "Fred is the only man alive who still remembers my father as an infant. When Fred first saw my father, [he] was twelve months old.", "answer1": "My father", "answer0": "Fred", "is_associative": 0, "correct_answer": "My father"}, {"index": 170, "sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.", "answer1": "Yakutsk", "answer0": "Kamchatka", "is_associative": 0, "correct_answer": "Kamchatka"}, {"index": 171, "sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were victorious within weeks.", "answer1": "Yakutsk", "answer0": "Kamchatka", "is_associative": 0, "correct_answer": "Yakutsk"}, {"index": 172, "sentence": "Look! There is a minnow swimming right below that duck! [It] had better get away to safety fast!", "answer1": "The duck", "answer0": "The minnow", "is_associative": 0, "correct_answer": "The minnow"}, {"index": 173, "sentence": "Look! There is a shark swimming right below that duck! [It] had better get away to safety fast!", "answer1": "The duck", "answer0": "The shark", "is_associative": 0, "correct_answer": "The duck"}, {"index": 178, "sentence": "The journalists interviewed the stars of the new movie. [They] were very persistent, so the interview lasted for a long time.", "answer1": "The stars", "answer0": "The journalists", "is_associative": 0, "correct_answer": "The journalists"}, {"index": 179, "sentence": "The journalists interviewed the stars of the new movie. [They] were very cooperative, so the interview lasted for a long time.", "answer1": "The stars", "answer0": "The journalists", "is_associative": 0, "correct_answer": "The stars"}, {"index": 186, "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority.", "answer1": "The opponents", "answer0": "The sponsors", "is_associative": 0, "correct_answer": "The sponsors"}, {"index": 187, "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the majority.", "answer1": "The opponents", "answer0": "The sponsors", "is_associative": 0, "correct_answer": "The opponents"}, {"index": 188, "sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .", "answer1": "The chocolate chip cookies", "answer0": "The oatmeal cookies", "is_associative": 0, "correct_answer": "The oatmeal cookies"}, {"index": 189, "sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. 
Next time, we should make fewer of [them] .", "answer1": "The chocolate chip cookies", "answer0": "The oatmeal cookies", "is_associative": 0, "correct_answer": "The chocolate chip cookies"}, {"index": 190, "sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .", "answer1": "chairs", "answer0": "copies of the newsletter", "is_associative": 0, "correct_answer": "copies of the newsletter"}, {"index": 191, "sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply too many of [them] .", "answer1": "chairs", "answer0": "copies of the newsletter", "is_associative": 0, "correct_answer": "chairs"}, {"index": 193, "sentence": "I stuck a pin through a carrot. When I pulled the pin out, [it] had a hole.", "answer1": "The carrot", "answer0": "The pin", "is_associative": 0, "correct_answer": "The carrot"}, {"index": 194, "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. But that turned out to be a bad idea, because [it] got full of coffee.", "answer1": "The coffee", "answer0": "The pen", "is_associative": 0, "correct_answer": "The pen"}, {"index": 195, "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. But that turned out to be a bad idea, because [it] got full of ink.", "answer1": "The coffee", "answer0": "The pen", "is_associative": 0, "correct_answer": "The coffee"}, {"index": 196, "sentence": "Steve follows Fred's example in everything. [He] admires him hugely.", "answer1": "Fred", "answer0": "Steve", "is_associative": 0, "correct_answer": "Steve"}, {"index": 197, "sentence": "Steve follows Fred's example in everything. [He] influences him hugely.", "answer1": "Fred", "answer0": "Steve", "is_associative": 0, "correct_answer": "Fred"}, {"index": 198, "sentence": "The table won't fit through the doorway because [it] is too wide.", "answer1": "The doorway", "answer0": "The table", "is_associative": 0, "correct_answer": "The table"}, {"index": 199, "sentence": "The table won't fit through the doorway because [it] is too narrow.", "answer1": "The doorway", "answer0": "The table", "is_associative": 0, "correct_answer": "The doorway"}, {"index": 200, "sentence": "Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.", "answer1": "The jacket", "answer0": "The sweater", "is_associative": 0, "correct_answer": "The sweater"}, {"index": 201, "sentence": "Grace was happy to trade me her sweater for my jacket. 
She thinks [it] looks great on her.", "answer1": "The jacket", "answer0": "The sweater", "is_associative": 0, "correct_answer": "The jacket"}, {"index": 202, "sentence": "John hired Bill to take care of [him] .", "answer1": "Bill", "answer0": "John", "is_associative": 0, "correct_answer": "John"}, {"index": 203, "sentence": "John hired himself out to Bill to take care of [him] .", "answer1": "Bill", "answer0": "John", "is_associative": 0, "correct_answer": "Bill"}, {"index": 204, "sentence": "John promised Bill to leave, so an hour later [he] left.", "answer1": "Bill", "answer0": "John", "is_associative": 0, "correct_answer": "John"}, {"index": 205, "sentence": "John ordered Bill to leave, so an hour later [he] left.", "answer1": "Bill", "answer0": "John", "is_associative": 0, "correct_answer": "Bill"}, {"index": 206, "sentence": "Sam Goodman's biography of the Spartan general Xenophanes conveys a vivid sense of the difficulties [he] faced in his research.", "answer1": "Xenophanes", "answer0": "Goodman", "is_associative": 0, "correct_answer": "Goodman"}, {"index": 207, "sentence": "Sam Goodman's biography of the Spartan general Xenophanes conveys a vivid sense of the difficulties [he] faced in his childhood.", "answer1": "Xenophanes", "answer0": "Goodman", "is_associative": 0, "correct_answer": "Xenophanes"}, {"index": 208, "sentence": "Emma's mother had died long ago, and [her] education had been managed by an excellent woman as governess.", "answer1": "Emma's mother", "answer0": "Emma", "is_associative": 0, "correct_answer": "Emma"}, {"index": 209, "sentence": "Emma's mother had died long ago, and [her] place had been taken by an excellent woman as governess.", "answer1": "Emma's mother", "answer0": "Emma", "is_associative": 0, "correct_answer": "Emma's mother"}, {"index": 210, "sentence": "Jane knocked on Susan's door but [she] did not get an answer.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Jane"}, {"index": 211, "sentence": "Jane knocked on Susan's door but [she] did not answer.", "answer1": "Susan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Susan"}, {"index": 212, "sentence": "Joe paid the detective after [he] received the final report on the case.", "answer1": "the detective", "answer0": "Joe", "is_associative": 0, "correct_answer": "Joe"}, {"index": 213, "sentence": "Joe paid the detective after [he] delivered the final report on the case.", "answer1": "the detective", "answer0": "Joe", "is_associative": 0, "correct_answer": "the detective"}, {"index": 214, "sentence": "Beth didn't get angry with Sally, who had cut her off, because [she] stopped and counted to ten.", "answer1": "Sally", "answer0": "Beth", "is_associative": 0, "correct_answer": "Beth"}, {"index": 215, "sentence": "Beth didn't get angry with Sally, who had cut her off, because [she] stopped and apologized.", "answer1": "Sally", "answer0": "Beth", "is_associative": 0, "correct_answer": "Sally"}, {"index": 216, "sentence": "Jim signaled the barman and gestured toward [his] empty glass", "answer1": "The barman", "answer0": "Jim", "is_associative": 0, "correct_answer": "Jim"}, {"index": 217, "sentence": "Jim signaled the barman and gestured toward [his] bathroom key.", "answer1": "The barman", "answer0": "Jim", "is_associative": 0, "correct_answer": "The barman"}, {"index": 218, "sentence": "Dan took the rear seat while Bill claimed the front because [his] \"Dibs!\" was slow.", "answer1": "Bill", "answer0": "Dan", "is_associative": 0, "correct_answer": "Dan"}, 
{"index": 219, "sentence": "Dan took the rear seat while Bill claimed the front because [his] \"Dibs!\" was quicker.", "answer1": "Bill", "answer0": "Dan", "is_associative": 0, "correct_answer": "Bill"}, {"index": 220, "sentence": "Tom said \"Check\" to Ralph as he moved [his] bishop.", "answer1": "Ralph", "answer0": "Tom", "is_associative": 0, "correct_answer": "Tom"}, {"index": 221, "sentence": "Tom said \"Check\" to Ralph as he took [his] bishop.", "answer1": "Ralph", "answer0": "Tom", "is_associative": 0, "correct_answer": "Ralph"}, {"index": 222, "sentence": "As Andrea in the crop duster passed over Susan, [she] could see the landing strip.", "answer1": "Susan", "answer0": "Andrea", "is_associative": 0, "correct_answer": "Andrea"}, {"index": 223, "sentence": "As Andrea in the crop duster passed over Susan, [she] could see the landing gear.", "answer1": "Susan", "answer0": "Andrea", "is_associative": 0, "correct_answer": "Susan"}, {"index": 224, "sentence": "Tom gave Ralph a lift to school so [he] wouldn't have to drive alone.", "answer1": "Ralph", "answer0": "Tom", "is_associative": 0, "correct_answer": "Tom"}, {"index": 225, "sentence": "Tom gave Ralph a lift to school so [he] wouldn't have to walk.", "answer1": "Ralph", "answer0": "Tom", "is_associative": 0, "correct_answer": "Ralph"}, {"index": 226, "sentence": "Bill passed the half-empty plate to John because [he] was full.", "answer1": "John", "answer0": "Bill", "is_associative": 0, "correct_answer": "Bill"}, {"index": 227, "sentence": "Bill passed the half-empty plate to John because [he] was hungry.", "answer1": "John", "answer0": "Bill", "is_associative": 0, "correct_answer": "John"}, {"index": 228, "sentence": "Bill passed the gameboy to John because [his] turn was over.", "answer1": "John", "answer0": "Bill", "is_associative": 0, "correct_answer": "Bill"}, {"index": 229, "sentence": "Bill passed the gameboy to John because [his] turn was next.", "answer1": "John", "answer0": "Bill", "is_associative": 0, "correct_answer": "John"}, {"index": 230, "sentence": "The man lifted the boy onto [his] shoulders.", "answer1": "The boy", "answer0": "The man", "is_associative": 0, "correct_answer": "The man"}, {"index": 232, "sentence": "Stretching [her] back, the woman smiled at the girl.", "answer1": "The girl", "answer0": "The woman", "is_associative": 0, "correct_answer": "The woman"}, {"index": 233, "sentence": "Patting [her] back, the woman smiled at the girl.", "answer1": "The girl", "answer0": "The woman", "is_associative": 0, "correct_answer": "The girl"}, {"index": 234, "sentence": "Billy cried because Toby wouldn't accept [his] toy.", "answer1": "Toby", "answer0": "Billy", "is_associative": 0, "correct_answer": "Billy"}, {"index": 235, "sentence": "Billy cried because Toby wouldn't share [his] toy.", "answer1": "Toby", "answer0": "Billy", "is_associative": 0, "correct_answer": "Toby"}, {"index": 236, "sentence": "Lily spoke to Donna, breaking [her] silence.", "answer1": "Donna", "answer0": "Lily", "is_associative": 0, "correct_answer": "Lily"}, {"index": 237, "sentence": "Lily spoke to Donna, breaking [her] concentration.", "answer1": "Donna", "answer0": "Lily", "is_associative": 0, "correct_answer": "Donna"}, {"index": 238, "sentence": "When Tommy dropped his ice cream, Timmy giggled, so father gave [him] a sympathetic look.", "answer1": "Timmy", "answer0": "Tommy", "is_associative": 0, "correct_answer": "Tommy"}, {"index": 239, "sentence": "When Tommy dropped his ice cream, Timmy giggled, so father gave [him] a stern look.", 
"answer1": "Timmy", "answer0": "Tommy", "is_associative": 0, "correct_answer": "Timmy"}, {"index": 240, "sentence": "As Ollie carried Tommy up the long winding steps, [his] legs ached.", "answer1": "Tommy", "answer0": "Ollie", "is_associative": 0, "correct_answer": "Ollie"}, {"index": 241, "sentence": "As Ollie carried Tommy up the long winding steps, [his] legs dangled.", "answer1": "Tommy", "answer0": "Ollie", "is_associative": 0, "correct_answer": "Tommy"}, {"index": 242, "sentence": "The father carried the sleeping boy in [his] arms", "answer1": "The boy", "answer0": "The father", "is_associative": 0, "correct_answer": "The father"}, {"index": 243, "sentence": "The father carried the sleeping boy in [his] bassinet.", "answer1": "The boy", "answer0": "The father", "is_associative": 0, "correct_answer": "The boy"}, {"index": 244, "sentence": "The woman held the girl against [her] chest", "answer1": "The girl", "answer0": "The woman", "is_associative": 0, "correct_answer": "The woman"}, {"index": 245, "sentence": "The woman held the girl against [her] will.", "answer1": "The girl", "answer0": "The woman", "is_associative": 0, "correct_answer": "The girl"}, {"index": 246, "sentence": "Pam's parents came home and found her having sex with her boyfriend, Paul. [They] were furious about it.", "answer1": "Pam and Paul", "answer0": "Pam's parents", "is_associative": 0, "correct_answer": "Pam's parents"}, {"index": 247, "sentence": "Pam's parents came home and found her having sex with her boyfriend, Paul. [They] were embarrassed about it.", "answer1": "Pam and Paul", "answer0": "Pam's parents", "is_associative": 0, "correct_answer": "Pam and Paul"}, {"index": 248, "sentence": "Dr. Adams informed Kate that [she] had retired and presented several options for future treatment.", "answer1": "Kate", "answer0": "Dr. Adams", "is_associative": 0, "correct_answer": "Dr. Adams"}, {"index": 249, "sentence": "Dr. Adams informed Kate that [she] had cancer and presented several options for future treatment.", "answer1": "Kate", "answer0": "Dr. Adams", "is_associative": 0, "correct_answer": "Kate"}, {"index": 250, "sentence": "Dan had to stop Bill from toying with the injured bird. [He] is very compassionate.", "answer1": "Bill", "answer0": "Dan", "is_associative": 0, "correct_answer": "Dan"}, {"index": 251, "sentence": "Dan had to stop Bill from toying with the injured bird. 
[He] is very cruel.", "answer1": "Bill", "answer0": "Dan", "is_associative": 0, "correct_answer": "Bill"}, {"index": 252, "sentence": "George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.", "answer1": "Eric", "answer0": "George", "is_associative": 0, "correct_answer": "George"}, {"index": 253, "sentence": "George got free tickets to the play, but he gave them to Eric, because [he] was particularly eager to see it.", "answer1": "Eric", "answer0": "George", "is_associative": 0, "correct_answer": "Eric"}, {"index": 254, "sentence": "George got free tickets to the play, but he gave them to Eric, because [he] was not particularly eager to see it.", "answer1": "Eric", "answer0": "George", "is_associative": 0, "correct_answer": "George"}, {"index": 255, "sentence": "Jane gave Joan candy because [she] wasn't hungry.", "answer1": "Joan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Jane"}, {"index": 256, "sentence": "Jane gave Joan candy because [she] was hungry.", "answer1": "Joan", "answer0": "Jane", "is_associative": 0, "correct_answer": "Joan"}, {"index": 257, "sentence": "I tried to paint a picture of an orchard, with lemons in the lemon trees, but [they] came out looking more like light bulbs.", "answer1": "lemon trees", "answer0": "lemons", "is_associative": 0, "correct_answer": "lemons"}, {"index": 258, "sentence": "I tried to paint a picture of an orchard, with lemons in the lemon trees, but [they] came out looking more like telephone poles.", "answer1": "lemon trees", "answer0": "lemons", "is_associative": 0, "correct_answer": "lemon trees"}, {"index": 259, "sentence": "James asked Robert for a favor but [he] was refused.", "answer1": "Robert", "answer0": "James", "is_associative": 0, "correct_answer": "James"}, {"index": 260, "sentence": "James asked Robert for a favor but [he] refused.", "answer1": "Robert", "answer0": "James", "is_associative": 0, "correct_answer": "Robert"}, {"index": 261, "sentence": "Kirilov ceded the presidency to Shatov because [he] was less popular.", "answer1": "Shatov", "answer0": "Kirilov", "is_associative": 0, "correct_answer": "Kirilov"}, {"index": 262, "sentence": "Kirilov ceded the presidency to Shatov because [he] was more popular.", "answer1": "Shatov", "answer0": "Kirilov", "is_associative": 0, "correct_answer": "Shatov"}, {"index": 263, "sentence": "Emma did not pass the ball to Janie although [she] saw that she was open.", "answer1": "Janie", "answer0": "Emma", "is_associative": 0, "correct_answer": "Emma"}, {"index": 264, "sentence": "Emma did not pass the ball to Janie although [she] was open.", "answer1": "Janie", "answer0": "Emma", "is_associative": 0, "correct_answer": "Janie"}, {"index": 265, "sentence": "I put the butterfly wing on the table and [it] broke.", "answer1": "The table", "answer0": "The butterfly wing", "is_associative": 0, "correct_answer": "The butterfly wing"}, {"index": 267, "sentence": "Madonna fired her trainer because [she] couldn't stand her boyfriend.", "answer1": "The trainer", "answer0": "Madonna", "is_associative": 0, "correct_answer": "Madonna"}, {"index": 268, "sentence": "Madonna fired her trainer because [she] slept with her boyfriend.", "answer1": "The trainer", "answer0": "Madonna", "is_associative": 0, "correct_answer": "The trainer"}, {"index": 269, "sentence": "Madonna fired her trainer because she slept with [her] boyfriend.", "answer1": "The trainer", "answer0": "Madonna", "is_associative": 0, "correct_answer": "Madonna"}, {"index": 
270, "sentence": "Madonna fired her trainer because she couldn't stand [her] boyfriend.", "answer1": "The trainer", "answer0": "Madonna", "is_associative": 0, "correct_answer": "The trainer"}, {"index": 271, "sentence": "Carol believed that Rebecca suspected that [she] had stolen the watch.", "answer1": "Rebecca", "answer0": "Carol", "is_associative": 0, "correct_answer": "Carol"}, {"index": 272, "sentence": "Carol believed that Rebecca regretted that [she] had stolen the watch.", "answer1": "Rebecca", "answer0": "Carol", "is_associative": 0, "correct_answer": "Rebecca"}, {"index": 53, "sentence": "The painting in Mark's living room shows an oak tree. [It] is to the right of a house.", "answer1": "The oak tree", "answer0": "The painting", "is_associative": 1, "correct_answer": "The oak tree"}, {"index": 54, "sentence": "There is a gap in the wall. You can see the garden through [it] .", "answer1": "The wall", "answer0": "The gap", "is_associative": 1, "correct_answer": "The gap"}, {"index": 55, "sentence": "There is a gap in the wall. You can see the garden behind [it] .", "answer1": "The wall", "answer0": "The gap", "is_associative": 1, "correct_answer": "The wall"}, {"index": 58, "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. Luckily, [it] was short, so it worked out.", "answer1": "The train", "answer0": "The meeting", "is_associative": 1, "correct_answer": "The meeting"}, {"index": 72, "sentence": "I couldn't put the pot on the shelf because [it] was too tall.", "answer1": "The shelf", "answer0": "The pot", "is_associative": 1, "correct_answer": "The pot"}, {"index": 74, "sentence": "I'm sure that my map will show this building; [it] is very good.", "answer1": "The building", "answer0": "The map", "is_associative": 1, "correct_answer": "The map"}, {"index": 75, "sentence": "I'm sure that my map will show this building; [it] is very famous.", "answer1": "The building", "answer0": "The map", "is_associative": 1, "correct_answer": "The building"}, {"index": 87, "sentence": "It was a summer afternoon, and the dog was sitting in the middle of the lawn. After a while, it got up and moved to a spot under the tree, because [it] was cooler.", "answer1": "The spot under the tree", "answer0": "The dog", "is_associative": 1, "correct_answer": "The spot under the tree"}, {"index": 98, "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.", "answer1": "The chewing gum", "answer0": "The key", "is_associative": 1, "correct_answer": "The key"}, {"index": 103, "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get [it] repaired.", "answer1": "The roof", "answer0": "The tree", "is_associative": 1, "correct_answer": "The roof"}, {"index": 109, "sentence": "John was jogging through the park when he saw a man juggling watermelons. [He] was very impressive.", "answer1": "The juggler", "answer0": "John", "is_associative": 1, "correct_answer": "The juggler"}, {"index": 112, "sentence": "Sam and Amy are passionately in love, but Amy's parents are unhappy about it, because [they] are fifteen.", "answer1": "Amy's parents", "answer0": "Sam and Amy", "is_associative": 1, "correct_answer": "Sam and Amy"}, {"index": 116, "sentence": "Joe has sold his house and bought a new one a few miles away. 
He will be moving out of [it] on Thursday.", "answer1": "The new house", "answer0": "The old house", "is_associative": 1, "correct_answer": "The old house"}, {"index": 117, "sentence": "Joe has sold his house and bought a new one a few miles away. He will be moving into [it] on Thursday.", "answer1": "The new house", "answer0": "The old house", "is_associative": 1, "correct_answer": "The new house"}, {"index": 118, "sentence": "Many people start to read Paul's books and can't put them down. [They] are gripped because Paul writes so well.", "answer1": "Paul's books", "answer0": "People", "is_associative": 1, "correct_answer": "People"}, {"index": 119, "sentence": "Many people start to read Paul's books and can't put them down. [They] are popular because Paul writes so well.", "answer1": "Paul's books", "answer0": "People", "is_associative": 1, "correct_answer": "Paul's books"}, {"index": 120, "sentence": "Mary took out her flute and played one of her favorite pieces. She has had [it] since she was a child.", "answer1": "The piece", "answer0": "The flute", "is_associative": 1, "correct_answer": "The flute"}, {"index": 136, "sentence": "The path to the lake was blocked, so we couldn't use [it] .", "answer1": "The lake", "answer0": "The path", "is_associative": 1, "correct_answer": "The path"}, {"index": 137, "sentence": "The path to the lake was blocked, so we couldn't reach [it] .", "answer1": "The lake", "answer0": "The path", "is_associative": 1, "correct_answer": "The lake"}, {"index": 138, "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, [it] was out.", "answer1": "The cloud", "answer0": "The sun", "is_associative": 1, "correct_answer": "The sun"}, {"index": 146, "sentence": "Fred and Alice had very warm down coats, but [they] were not prepared for the cold in Alaska.", "answer1": "coats", "answer0": "Fred and Alice", "is_associative": 1, "correct_answer": "Fred and Alice"}, {"index": 147, "sentence": "Fred and Alice had very warm down coats, but [they] were not enough for the cold in Alaska.", "answer1": "coats", "answer0": "Fred and Alice", "is_associative": 1, "correct_answer": "coats"}, {"index": 155, "sentence": "The foxes are getting in at night and attacking the chickens. I shall have to guard [them] .", "answer1": "The chickens", "answer0": "The foxes", "is_associative": 1, "correct_answer": "The chickens"}, {"index": 158, "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. He opened [them] when the wind stopped.", "answer1": "His hands", "answer0": "His eyes", "is_associative": 1, "correct_answer": "His eyes"}, {"index": 174, "sentence": "Archaeologists have concluded that humans lived in Laputa 20,000 years ago. [They] hunted for evidence on the river banks.", "answer1": "Prehistoric humans", "answer0": "Archaeologists", "is_associative": 1, "correct_answer": "Archaeologists"}, {"index": 175, "sentence": "Archaeologists have concluded that humans lived in Laputa 20,000 years ago. [They] hunted for deer on the river banks.", "answer1": "Prehistoric humans", "answer0": "Archaeologists", "is_associative": 1, "correct_answer": "Prehistoric humans"}, {"index": 176, "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. 
[They] began two years ago.", "answer1": "The fish", "answer0": "The scientists", "is_associative": 1, "correct_answer": "The scientists"}, {"index": 177, "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. [They] appeared two years ago.", "answer1": "The fish", "answer0": "The scientists", "is_associative": 1, "correct_answer": "The fish"}, {"index": 180, "sentence": "The police arrested all of the gang members. [They] were trying to stop the drug trade in the neighborhood.", "answer1": "The gang members", "answer0": "The police", "is_associative": 1, "correct_answer": "The police"}, {"index": 181, "sentence": "The police arrested all of the gang members. [They] were trying to run the drug trade in the neighborhood.", "answer1": "The gang members", "answer0": "The police", "is_associative": 1, "correct_answer": "The gang members"}, {"index": 182, "sentence": "I put the cake away in the refrigerator. [It] has a lot of butter in it.", "answer1": "The refrigerator", "answer0": "The cake", "is_associative": 1, "correct_answer": "The cake"}, {"index": 183, "sentence": "I put the cake away in the refrigerator. [It] has a lot of leftovers in it.", "answer1": "The refrigerator", "answer0": "The cake", "is_associative": 1, "correct_answer": "The refrigerator"}, {"index": 184, "sentence": "Sam broke both his ankles and he's walking with crutches. But a month or so from now [they] should be better.", "answer1": "The crutches", "answer0": "The ankles", "is_associative": 1, "correct_answer": "The ankles"}, {"index": 185, "sentence": "Sam broke both his ankles and he's walking with crutches. But a month or so from now [they] should be unnecessary.", "answer1": "The crutches", "answer0": "The ankles", "is_associative": 1, "correct_answer": "The crutches"}, {"index": 192, "sentence": "I stuck a pin through a carrot. 
When I pulled the pin out, [it] left a hole.", "answer1": "The carrot", "answer0": "The pin", "is_associative": 1, "correct_answer": "The pin"}, {"index": 231, "sentence": "The man lifted the boy onto [his] bunk bed.", "answer1": "The boy", "answer0": "The man", "is_associative": 1, "correct_answer": "The boy"}, {"index": 266, "sentence": "I put the heavy book on the table and [it] broke.", "answer1": "The table", "answer0": "The heavy book", "is_associative": 1, "correct_answer": "The table"}] \ No newline at end of file diff --git a/WSC_child_problem.json b/WSC_child_problem.json new file mode 100644 index 00000000000000..33336d7e6cdef0 --- /dev/null +++ b/WSC_child_problem.json @@ -0,0 +1,11443 @@ +[ + { + "index": 0, + "sentences": [ + { + "sentence": "The policemen refused the demonstrators a permit because the _ feared violence.", + "answer1": [ + "demonstrators" + ], + "answer0": [ + "policemen" + ], + "correct_answer": [ + "policemen" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "demonstrators", + 0.4118339717388153 + ] + ], + "score": 0 + } + ] + }, + { + "index": 1, + "sentences": [ + { + "sentence": "The policemen refused the demonstrators a permit because the _ advocated violence.", + "answer1": [ + "demonstrators" + ], + "answer0": [ + "policemen" + ], + "correct_answer": [ + "demonstrators" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "demonstrators", + 0.457832932472229 + ] + ], + "score": 1 + } + ] + }, + { + "index": 2, + "sentences": [ + { + "sentence": "The trophy doesn't fit into the brown suitcase because the _ is too large.", + "answer1": [ + "suitcase" + ], + "answer0": [ + "trophy" + ], + "correct_answer": [ + "trophy" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "suitcase", + 0.21058858931064606 + ] + ], + "score": 0 + } + ] + }, + { + "index": 3, + "sentences": [ + { + "sentence": "The trophy doesn't fit into the brown suitcase because the _ is too small.", + "answer1": [ + "suitcase" + ], + "answer0": [ + "trophy" + ], + "correct_answer": [ + "suitcase" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "suitcase", + 0.11347327381372452 + ], + [ + "trophy", + 0.04299129545688629 + ] + ], + "score": 1 + } + ] + }, + { + "index": 4, + "sentences": [ + { + "sentence": "Susan made sure to thank Alan for all the help _ had recieved.", + "answer1": [ + "Alan", + "he" + ], + "answer0": [ + "Susan", + "she" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8059180378913879 + ], + [ + "she", + 0.10031098127365112 + ], + [ + "alan", + 0.006458722520619631 + ] + ], + "score": 0 + }, + { + "sentence": "Anthony made sure to thank Donna for all the help _ had recieved.", + "answer1": [ + "Donna", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.8589648604393005 + ], + [ + "he", + 0.035839419811964035 + ], + [ + "donna", + 0.009961705654859543 + ], + [ + "anthony", + 0.0026284847408533096 + ] + ], + "score": 0 + } + ] + }, + { + "index": 5, + "sentences": [ + { + "sentence": "Wendy made sure to thank David for all the help _ had given.", + "answer1": [ + "David", + "he" + ], + "answer0": [ + "Wendy", + "she" + ], + "correct_answer": [ + "David", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.9703136682510376 + ], + [ + "she", + 0.012762513943016529 + ], + [ + "david", + 0.002392230788245797 + ] + ], + "score": 1 + }, + { + 
"sentence": "Daniel made sure to thank Wendy for all the help _ had given.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9681608080863953 + ], + [ + "he", + 0.01262774970382452 + ], + [ + "wendy", + 0.003302064025774598 + ], + [ + "daniel", + 0.00175810931250453 + ] + ], + "score": 1 + } + ] + }, + { + "index": 6, + "sentences": [ + { + "sentence": "Daniel tried to call Lisa on the phone, but _ wasn't successful.", + "answer1": [ + "Lisa", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.9892734885215759 + ], + [ + "she", + 0.005904461722820997 + ], + [ + "daniel", + 0.0004344168701209128 + ] + ], + "score": 1 + }, + { + "sentence": "Donna tried to call David on the phone, but _ wasn't successful.", + "answer1": [ + "David", + "he" + ], + "answer0": [ + "Donna", + "she" + ], + "correct_answer": [ + "Donna", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.9855420589447021 + ], + [ + "he", + 0.006738866213709116 + ], + [ + "david", + 0.0012104340130463243 + ] + ], + "score": 1 + } + ] + }, + { + "index": 7, + "sentences": [ + { + "sentence": "Barbara tried to call Charles on the phone, but _ wasn't available.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Barbara", + "she" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8587782979011536 + ], + [ + "charles", + 0.1335594803094864 + ], + [ + "she", + 0.000997341237962246 + ] + ], + "score": 1 + }, + { + "sentence": "Warren tried to call Laura on the phone, but _ wasn't available.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9781970381736755 + ], + [ + "laura", + 0.01614491641521454 + ], + [ + "he", + 0.00039897472015582025 + ] + ], + "score": 1 + } + ] + }, + { + "index": 8, + "sentences": [ + { + "sentence": "The lawyer asked the witness a question, but the _ was reluctant to repeat it.", + "answer1": [ + "witness" + ], + "answer0": [ + "lawyer" + ], + "correct_answer": [ + "lawyer" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "witness", + 0.6564255952835083 + ], + [ + "lawyer", + 0.012273530475795269 + ] + ], + "score": 0 + } + ] + }, + { + "index": 9, + "sentences": [ + { + "sentence": "The lawyer asked the witness a question, but the _ was reluctant to answer it.", + "answer1": [ + "witness" + ], + "answer0": [ + "lawyer" + ], + "correct_answer": [ + "witness" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "witness", + 0.8160716891288757 + ] + ], + "score": 1 + } + ] + }, + { + "index": 10, + "sentences": [ + { + "sentence": "The truck zoomed by the bus because the _ was going so fast.", + "answer1": [ + "bus" + ], + "answer0": [ + "truck" + ], + "correct_answer": [ + "truck" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "bus", + 0.2043217420578003 + ], + [ + "truck", + 0.18831151723861694 + ] + ], + "score": 0 + } + ] + }, + { + "index": 11, + "sentences": [ + { + "sentence": "The truck zoomed by the bus because the _ was going so slow.", + "answer1": [ + "bus" + ], + "answer0": [ + "truck" + ], + "correct_answer": [ + "bus" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "bus", + 0.239765927195549 
+ ], + [ + "truck", + 0.16296519339084625 + ] + ], + "score": 1 + } + ] + }, + { + "index": 12, + "sentences": [ + { + "sentence": "Helen felt vindicated when her longtime rival George revealed that _ was the winner of the competition.", + "answer1": [ + "George", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Helen", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.7605838775634766 + ], + [ + "she", + 0.1281837671995163 + ], + [ + "helen", + 0.03044680505990982 + ], + [ + "george", + 0.016294078901410103 + ] + ], + "score": 0 + }, + { + "sentence": "George felt vindicated when his longtime rival Sandra revealed that _ was the winner of the competition.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "George", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.6007376313209534 + ], + [ + "he", + 0.27373355627059937 + ], + [ + "george", + 0.06088083237409592 + ], + [ + "sandra", + 0.002873124787583947 + ] + ], + "score": 0 + } + ] + }, + { + "index": 13, + "sentences": [ + { + "sentence": "Edward felt crushed when his longtime rival Barbara revealed that _ was the winner of the competition.", + "answer1": [ + "Barbara", + "she" + ], + "answer0": [ + "Edward", + "he" + ], + "correct_answer": [ + "Barbara", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.7010877132415771 + ], + [ + "he", + 0.1804790198802948 + ], + [ + "edward", + 0.008347589522600174 + ], + [ + "barbara", + 0.0056170192547142506 + ] + ], + "score": 1 + }, + { + "sentence": "Nancy felt crushed when her longtime rival Tony revealed that _ was the winner of the competition.", + "answer1": [ + "Tony", + "he" + ], + "answer0": [ + "Nancy", + "she" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6629055142402649 + ], + [ + "she", + 0.231082022190094 + ], + [ + "tony", + 0.03906760364770889 + ], + [ + "nancy", + 0.013350573368370533 + ] + ], + "score": 1 + } + ] + }, + { + "index": 14, + "sentences": [ + { + "sentence": "The man couldn't lift his daughter because _ was so weak.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.966475784778595 + ], + [ + "he", + 0.012883448973298073 + ] + ], + "score": 0 + } + ] + }, + { + "index": 15, + "sentences": [ + { + "sentence": "The man couldn't lift his daughter because _ was so heavy.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9816145300865173 + ], + [ + "he", + 0.0035426830872893333 + ] + ], + "score": 1 + } + ] + }, + { + "index": 16, + "sentences": [ + { + "sentence": "The ball crashed right through the table because the _ was made of steel.", + "answer1": [ + "table" + ], + "answer0": [ + "ball" + ], + "correct_answer": [ + "ball" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "table", + 0.3629739284515381 + ], + [ + "ball", + 0.08679845184087753 + ] + ], + "score": 0 + } + ] + }, + { + "index": 17, + "sentences": [ + { + "sentence": "The ball crashed right through the table because the _ was made of styrofoam.", + "answer1": [ + "table" + ], + "answer0": [ + "ball" + ], + "correct_answer": [ + "table" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "table", + 0.4074442386627197 + ], + [ + "ball", + 0.10976675152778625 + ] + ], 
+ "score": 1 + } + ] + }, + { + "index": 18, + "sentences": [ + { + "sentence": "Tony couldn't see the stage with Jennifer in front of him because _ is so short.", + "answer1": [ + "Jennifer", + "she" + ], + "answer0": [ + "Tony", + "he" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.5132592916488647 + ], + [ + "jennifer", + 0.40244361758232117 + ], + [ + "he", + 0.013166015036404133 + ], + [ + "tony", + 0.003285476006567478 + ] + ], + "score": 0 + }, + { + "sentence": "Mandy couldn't see the stage with Charles in front of her because _ is so short.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Mandy", + "she" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.46862319111824036 + ], + [ + "she", + 0.21981821954250336 + ], + [ + "charles", + 0.14085616171360016 + ] + ], + "score": 0 + } + ] + }, + { + "index": 19, + "sentences": [ + { + "sentence": "Sandra couldn't see the stage with Jason in front of her because _ is so tall.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Jason", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.7260995507240295 + ], + [ + "jason", + 0.2526448965072632 + ], + [ + "she", + 0.006947671994566917 + ], + [ + "sandra", + 0.0005759962368756533 + ] + ], + "score": 1 + }, + { + "sentence": "Richard couldn't see the stage with Margaret in front of him because _ is so tall.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Richard", + "he" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8138208389282227 + ], + [ + "margaret", + 0.1297387033700943 + ], + [ + "he", + 0.02292880043387413 + ], + [ + "richard", + 0.0023241350427269936 + ] + ], + "score": 1 + } + ] + }, + { + "index": 20, + "sentences": [ + { + "sentence": "Lisa threw his schoolbag down to Jason after _ reached the top of the stairs.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.2535589635372162 + ], + [ + "she", + 0.0745818242430687 + ], + [ + "jason", + 0.034438613802194595 + ] + ], + "score": 0 + }, + { + "sentence": "Zack threw his schoolbag down to Sarah after _ reached the top of the stairs.", + "answer1": [ + "Sarah", + "she" + ], + "answer0": [ + "Zack", + "he" + ], + "correct_answer": [ + "Zack", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.2084319144487381 + ], + [ + "he", + 0.10264766216278076 + ], + [ + "sarah", + 0.0038962233811616898 + ] + ], + "score": 0 + } + ] + }, + { + "index": 21, + "sentences": [ + { + "sentence": "Joseph threw his schoolbag down to Emma after _ reached the bottom of the stairs.", + "answer1": [ + "Emma", + "she" + ], + "answer0": [ + "Joseph", + "he" + ], + "correct_answer": [ + "Emma", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.23867610096931458 + ], + [ + "he", + 0.09107589721679688 + ], + [ + "emma", + 0.021801337599754333 + ] + ], + "score": 1 + }, + { + "sentence": "Anna threw his schoolbag down to Anthony after _ reached the bottom of the stairs.", + "answer1": [ + "Anthony", + "he" + ], + "answer0": [ + "Anna", + "she" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.3054933547973633 + ], + [ + 
"she", + 0.06062417849898338 + ], + [ + "anthony", + 0.00862197671085596 + ] + ], + "score": 1 + } + ] + }, + { + "index": 22, + "sentences": [ + { + "sentence": "Although they ran at about the same speed, John beat Vivian because _ had such a good start.", + "answer1": [ + "Vivian", + "she" + ], + "answer0": [ + "John", + "he" + ], + "correct_answer": [ + "John", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4357670843601227 + ], + [ + "she", + 0.4087676405906677 + ], + [ + "vivian", + 0.028013162314891815 + ], + [ + "john", + 0.007432900369167328 + ] + ], + "score": 1 + }, + { + "sentence": "Although they ran at about the same speed, Barbara beat Tony because _ had such a good start.", + "answer1": [ + "Tony", + "he" + ], + "answer0": [ + "Barbara", + "she" + ], + "correct_answer": [ + "Barbara", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5490840673446655 + ], + [ + "she", + 0.2430657595396042 + ], + [ + "tony", + 0.01997600682079792 + ], + [ + "barbara", + 0.004922997672110796 + ] + ], + "score": 0 + } + ] + }, + { + "index": 23, + "sentences": [ + { + "sentence": "Although they ran at about the same speed, George beat Nancy because _ had such a bad start.", + "answer1": [ + "Nancy", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "Nancy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.6856573820114136 + ], + [ + "he", + 0.16068877279758453 + ], + [ + "nancy", + 0.027285361662507057 + ], + [ + "george", + 0.019552595913410187 + ] + ], + "score": 1 + }, + { + "sentence": "Although they ran at about the same speed, Sue beat Edward because _ had such a bad start.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Edward", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6439886689186096 + ], + [ + "she", + 0.201904296875 + ], + [ + "edward", + 0.05457816645503044 + ], + [ + "sue", + 0.00406282115727663 + ] + ], + "score": 1 + } + ] + }, + { + "index": 24, + "sentences": [ + { + "sentence": "The sculpture rolled off the shelf because the _ wasn't anchored.", + "answer1": [ + "shelf" + ], + "answer0": [ + "sculpture" + ], + "correct_answer": [ + "sculpture" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "shelf", + 0.049739278852939606 + ] + ], + "score": 0 + } + ] + }, + { + "index": 25, + "sentences": [ + { + "sentence": "The sculpture rolled off the shelf because the _ wasn't level.", + "answer1": [ + "shelf" + ], + "answer0": [ + "sculpture" + ], + "correct_answer": [ + "shelf" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "shelf", + 0.1008094847202301 + ] + ], + "score": 1 + } + ] + }, + { + "index": 26, + "sentences": [ + { + "sentence": "Betty's drawing was hung just above Warren's and _'s drawing did look much better with another one below it.", + "answer1": [ + "Warren" + ], + "answer0": [ + "Betty" + ], + "correct_answer": [ + "Betty" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "warren", + 0.11010950058698654 + ], + [ + "betty", + 0.07763160765171051 + ] + ], + "score": 0 + }, + { + "sentence": "Edward's drawing was hung just above Wendy's and _'s drawing did look much better with another one below it.", + "answer1": [ + "Wendy" + ], + "answer0": [ + "Edward" + ], + "correct_answer": [ + "Edward" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "edward", + 0.14753200113773346 + ], + [ + "wendy", + 0.021199282258749008 + ] + ], + "score": 1 + } + 
] + }, + { + "index": 27, + "sentences": [ + { + "sentence": "Paul's drawing was hung just above Anna's and _'s drawing did look much better with another one above it.", + "answer1": [ + "Anna" + ], + "answer0": [ + "Paul" + ], + "correct_answer": [ + "Anna" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "anna", + 0.39766207337379456 + ], + [ + "paul", + 0.17563124001026154 + ] + ], + "score": 1 + }, + { + "sentence": "Lisa's drawing was hung just above Anthony's and _'s drawing did look much better with another one above it.", + "answer1": [ + "Anthony" + ], + "answer0": [ + "Lisa" + ], + "correct_answer": [ + "Anthony" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "anthony", + 0.20052659511566162 + ], + [ + "lisa", + 0.06912633776664734 + ] + ], + "score": 1 + } + ] + }, + { + "index": 28, + "sentences": [ + { + "sentence": "Charles did a lot better than his good friend Nancy on the test because _ had studied so hard.", + "answer1": [ + "Nancy", + "she" + ], + "answer0": [ + "Charles", + "he" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4398093521595001 + ], + [ + "she", + 0.15170268714427948 + ], + [ + "charles", + 0.08861381560564041 + ], + [ + "nancy", + 0.08683475106954575 + ] + ], + "score": 1 + }, + { + "sentence": "Anna did a lot better than her good friend Jason on the test because _ had studied so hard.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Anna", + "she" + ], + "correct_answer": [ + "Anna", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5284304618835449 + ], + [ + "she", + 0.1605355590581894 + ], + [ + "jason", + 0.08630751073360443 + ], + [ + "anna", + 0.051893241703510284 + ] + ], + "score": 0 + } + ] + }, + { + "index": 29, + "sentences": [ + { + "sentence": "Linda did a lot worse than her good friend Thomas on the test because _ had studied so hard.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.4308488368988037 + ], + [ + "he", + 0.23555852472782135 + ], + [ + "linda", + 0.10451061278581619 + ], + [ + "thomas", + 0.00802362896502018 + ] + ], + "score": 0 + }, + { + "sentence": "Daniel did a lot worse than his good friend Wendy on the test because _ had studied so hard.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.6460021734237671 + ], + [ + "he", + 0.13338574767112732 + ], + [ + "wendy", + 0.08289165794849396 + ], + [ + "daniel", + 0.04040371626615524 + ] + ], + "score": 1 + } + ] + }, + { + "index": 30, + "sentences": [ + { + "sentence": "The doctors arrived after the police because the _ were coming from so far away.", + "answer1": [ + "police" + ], + "answer0": [ + "doctors" + ], + "correct_answer": [ + "doctors" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 31, + "sentences": [ + { + "sentence": "The doctors arrived before the police because the _ were coming from so far away.", + "answer1": [ + "police" + ], + "answer0": [ + "doctors" + ], + "correct_answer": [ + "police" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 32, + "sentences": [ + { + "sentence": "Tim was upset with Barbara because the toaster _ had bought from her didn't work.", + "answer1": [ + 
"Barbara", + "she" + ], + "answer0": [ + "Tim", + "he" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5925723910331726 + ], + [ + "barbara", + 0.10750548541545868 + ], + [ + "tim", + 0.036800283938646317 + ], + [ + "she", + 0.034948937594890594 + ] + ], + "score": 1 + }, + { + "sentence": "Betty was upset with Daniel because the toaster _ had bought from him didn't work.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Betty", + "she" + ], + "correct_answer": [ + "Betty", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.5441405177116394 + ], + [ + "betty", + 0.3130474388599396 + ], + [ + "he", + 0.015271657146513462 + ], + [ + "daniel", + 0.014954675920307636 + ] + ], + "score": 1 + } + ] + }, + { + "index": 33, + "sentences": [ + { + "sentence": "Joseph was upset with Sandra because the toaster _ had sold him didn't work.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Joseph", + "he" + ], + "correct_answer": [ + "Sandra", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.3242930769920349 + ], + [ + "sandra", + 0.06303571909666061 + ], + [ + "joseph", + 0.01616159826517105 + ] + ], + "score": 1 + }, + { + "sentence": "Sue was upset with Brian because the toaster _ had sold her didn't work.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Brian", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.4483914077281952 + ], + [ + "brian", + 0.14752180874347687 + ], + [ + "sue", + 0.04900128394365311 + ], + [ + "she", + 0.014528470113873482 + ] + ], + "score": 1 + } + ] + }, + { + "index": 34, + "sentences": [ + { + "sentence": "Linda yelled at William because _ was so upset.", + "answer1": [ + "William", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Linda", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8594950437545776 + ], + [ + "she", + 0.10877512395381927 + ], + [ + "william", + 0.012823620811104774 + ] + ], + "score": 0 + }, + { + "sentence": "Warren yelled at Wendy because _ was so upset.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Warren", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.8749997019767761 + ], + [ + "he", + 0.09160665422677994 + ], + [ + "wendy", + 0.007091708946973085 + ] + ], + "score": 0 + } + ] + }, + { + "index": 35, + "sentences": [ + { + "sentence": "Sandra comforted Daniel because _ was so upset.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.946452796459198 + ], + [ + "she", + 0.029977431520819664 + ], + [ + "daniel", + 0.014466170221567154 + ] + ], + "score": 1 + }, + { + "sentence": "Tim comforted Anna because _ was so upset.", + "answer1": [ + "Anna", + "she" + ], + "answer0": [ + "Tim", + "he" + ], + "correct_answer": [ + "Anna", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9258339405059814 + ], + [ + "anna", + 0.03597695752978325 + ], + [ + "he", + 0.024506375193595886 + ] + ], + "score": 1 + } + ] + }, + { + "index": 36, + "sentences": [ + { + "sentence": "The sack had been placed above the bag, so the _ had to be moved first.", + "answer1": [ + "bag" + ], + "answer0": [ + "sack" + ], + "correct_answer": [ + "sack" + ], + 
"adjacent_ref": false, + "predict_answer": [ + [ + "bag", + 0.14206381142139435 + ], + [ + "sack", + 0.12494820356369019 + ] + ], + "score": 0 + } + ] + }, + { + "index": 37, + "sentences": [ + { + "sentence": "The sack had been placed below the bag, so the _ had to be moved first.", + "answer1": [ + "bag" + ], + "answer0": [ + "sack" + ], + "correct_answer": [ + "bag" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "bag", + 0.12820377945899963 + ], + [ + "sack", + 0.12242510914802551 + ] + ], + "score": 1 + } + ] + }, + { + "index": 38, + "sentences": [ + { + "sentence": "Laura envies Warren although _ is very successful.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Laura", + "she" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5644444227218628 + ], + [ + "warren", + 0.27615460753440857 + ], + [ + "she", + 0.07276368141174316 + ], + [ + "laura", + 0.003620448987931013 + ] + ], + "score": 0 + }, + { + "sentence": "Daniel envies Emma although _ is very successful.", + "answer1": [ + "Emma", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.4373820424079895 + ], + [ + "he", + 0.3135783076286316 + ], + [ + "daniel", + 0.04151993989944458 + ], + [ + "emma", + 0.02725609578192234 + ] + ], + "score": 0 + } + ] + }, + { + "index": 39, + "sentences": [ + { + "sentence": "Jason envies Susan because _ is very successful.", + "answer1": [ + "Susan", + "she" + ], + "answer0": [ + "Jason", + "he" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9335176944732666 + ], + [ + "susan", + 0.04264418035745621 + ], + [ + "he", + 0.009751184843480587 + ] + ], + "score": 1 + }, + { + "sentence": "Sandra envies Zack because _ is very successful.", + "answer1": [ + "Zack", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Zack", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8800366520881653 + ], + [ + "zack", + 0.09258320927619934 + ], + [ + "she", + 0.007650280836969614 + ] + ], + "score": 1 + } + ] + }, + { + "index": 40, + "sentences": [ + { + "sentence": "The older students were bullying the younger ones, so we punished the _ students .", + "answer1": [ + "younger" + ], + "answer0": [ + "older" + ], + "correct_answer": [ + "older" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "younger", + 0.5788522362709045 + ], + [ + "older", + 0.3715261220932007 + ] + ], + "score": 0 + } + ] + }, + { + "index": 41, + "sentences": [ + { + "sentence": "The older students were bullying the younger ones, so we rescued the _ students .", + "answer1": [ + "younger" + ], + "answer0": [ + "older" + ], + "correct_answer": [ + "younger" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "younger", + 0.459501713514328 + ], + [ + "older", + 0.3885803818702698 + ] + ], + "score": 1 + } + ] + }, + { + "index": 42, + "sentences": [ + { + "sentence": "I poured water from the bottle into the cup until the _ was empty.", + "answer1": [ + "cup" + ], + "answer0": [ + "bottle" + ], + "correct_answer": [ + "bottle" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "cup", + 0.16094626486301422 + ], + [ + "bottle", + 0.10395961999893188 + ] + ], + "score": 0 + } + ] + }, + { + "index": 43, + "sentences": [ + { + "sentence": "I poured water from the bottle into the cup until the _ was full.", + "answer1": [ 
+ "cup" + ], + "answer0": [ + "bottle" + ], + "correct_answer": [ + "cup" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "cup", + 0.18986396491527557 + ], + [ + "bottle", + 0.03942202031612396 + ] + ], + "score": 1 + } + ] + }, + { + "index": 44, + "sentences": [ + { + "sentence": "William knows all about Sue's personal problems because _ is nosy.", + "answer1": [ + "Sue", + "she" + ], + "answer0": [ + "William", + "he" + ], + "correct_answer": [ + "William", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.4617686867713928 + ], + [ + "he", + 0.31756243109703064 + ], + [ + "sue", + 0.10323674231767654 + ], + [ + "william", + 0.01918354444205761 + ] + ], + "score": 0 + }, + { + "sentence": "Helen knows all about Warren's personal problems because _ is nosy.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Helen", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.37865328788757324 + ], + [ + "warren", + 0.3408011496067047 + ], + [ + "she", + 0.21000434458255768 + ], + [ + "helen", + 0.012840399518609047 + ] + ], + "score": 0 + } + ] + }, + { + "index": 45, + "sentences": [ + { + "sentence": "Emma knows all about Warren's personal problems because _ is indiscreet.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Warren", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "warren", + 0.6146198511123657 + ], + [ + "he", + 0.289546400308609 + ], + [ + "she", + 0.02142639271914959 + ] + ], + "score": 1 + }, + { + "sentence": "Alan knows all about Anna's personal problems because _ is indiscreet.", + "answer1": [ + "Anna", + "she" + ], + "answer0": [ + "Alan", + "he" + ], + "correct_answer": [ + "Anna", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.49282529950141907 + ], + [ + "anna", + 0.32425856590270996 + ], + [ + "he", + 0.07345326244831085 + ], + [ + "alan", + 0.017393626272678375 + ] + ], + "score": 1 + } + ] + }, + { + "index": 46, + "sentences": [ + { + "sentence": "John explained his theory to Wendy but _ couldn't convince her.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "John", + "he" + ], + "correct_answer": [ + "John", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.9100954532623291 + ], + [ + "john", + 0.017341775819659233 + ], + [ + "wendy", + 0.004638417158275843 + ] + ], + "score": 1 + }, + { + "sentence": "Wendy explained her theory to Warren but _ couldn't convince him.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Wendy", + "she" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.9247599244117737 + ], + [ + "wendy", + 0.0049832225777208805 + ] + ], + "score": 1 + } + ] + }, + { + "index": 47, + "sentences": [ + { + "sentence": "Sarah explained her theory to Steven but _ couldn't understand her.", + "answer1": [ + "Steven", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "Steven", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8895952105522156 + ], + [ + "steven", + 0.09927554428577423 + ], + [ + "she", + 0.000530878605786711 + ] + ], + "score": 1 + }, + { + "sentence": "David explained his theory to Susan but _ couldn't understand him.", + "answer1": [ + "Susan", + "she" + ], + "answer0": [ + "David", + "he" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": true, + 
"predict_answer": [ + [ + "she", + 0.9552003741264343 + ], + [ + "susan", + 0.025168264284729958 + ] + ], + "score": 1 + } + ] + }, + { + "index": 48, + "sentences": [ + { + "sentence": "Daniel knew that Margaret's son had been in a car accident, so _ told her about it.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.9160689115524292 + ], + [ + "daniel", + 0.05031977966427803 + ] + ], + "score": 1 + }, + { + "sentence": "Amy knew that Brian's son had been in a car accident, so _ told him about it.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Amy", + "she" + ], + "correct_answer": [ + "Amy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.825965404510498 + ], + [ + "amy", + 0.11254064738750458 + ] + ], + "score": 1 + } + ] + }, + { + "index": 49, + "sentences": [ + { + "sentence": "George knew that Sarah's son had been in a car accident, because _ told him about it.", + "answer1": [ + "Sarah", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "Sarah", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.25967302918434143 + ], + [ + "sarah", + 0.1378738284111023 + ], + [ + "george", + 0.1081177145242691 + ], + [ + "he", + 0.07801171392202377 + ] + ], + "score": 1 + }, + { + "sentence": "Susan knew that Tim's son had been in a car accident, because _ told her about it.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Susan", + "she" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.12469684332609177 + ], + [ + "tim", + 0.1100628525018692 + ], + [ + "susan", + 0.08253230154514313 + ], + [ + "she", + 0.044477976858615875 + ] + ], + "score": 1 + } + ] + }, + { + "index": 50, + "sentences": [ + { + "sentence": "Jason's aunt Betty can still beat him at tennis, even though _ is 30 years younger.", + "answer1": [ + "Betty", + "she" + ], + "answer0": [ + "Jason", + "he" + ], + "correct_answer": [ + "Jason", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.4724120795726776 + ], + [ + "he", + 0.3619353473186493 + ], + [ + "jason", + 0.11430627852678299 + ], + [ + "betty", + 0.009248387068510056 + ] + ], + "score": 0 + } + ] + }, + { + "index": 51, + "sentences": [ + { + "sentence": "Alan's aunt Laura can still beat him at tennis, even though _ is 30 years older.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "Alan", + "he" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.410331130027771 + ], + [ + "he", + 0.3431159555912018 + ], + [ + "alan", + 0.2048305720090866 + ] + ], + "score": 1 + } + ] + }, + { + "index": 52, + "sentences": [ + { + "sentence": "The painting in Mark's living room shows a tree. the _ is to the right of the bookcase.", + "answer1": [ + "tree" + ], + "answer0": [ + "painting" + ], + "correct_answer": [ + "painting" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "painting", + 0.2549212872982025 + ], + [ + "tree", + 0.03998008370399475 + ] + ], + "score": 1 + } + ] + }, + { + "index": 53, + "sentences": [ + { + "sentence": "The painting in Mark's living room shows a tree. 
the _ is to the right of a house.", + "answer1": [ + "tree" + ], + "answer0": [ + "painting" + ], + "correct_answer": [ + "tree" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "painting", + 0.3817044198513031 + ], + [ + "tree", + 0.12644894421100616 + ] + ], + "score": 0 + } + ] + }, + { + "index": 54, + "sentences": [ + { + "sentence": "There is a gap in the wall. You can see the garden through the _ .", + "answer1": [ + "wall" + ], + "answer0": [ + "gap" + ], + "correct_answer": [ + "gap" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "wall", + 0.24062421917915344 + ], + [ + "gap", + 0.06364461034536362 + ] + ], + "score": 0 + } + ] + }, + { + "index": 55, + "sentences": [ + { + "sentence": "There is a gap in the wall. You can see the garden behind the _ .", + "answer1": [ + "wall" + ], + "answer0": [ + "gap" + ], + "correct_answer": [ + "wall" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "wall", + 0.1855088472366333 + ] + ], + "score": 1 + } + ] + }, + { + "index": 56, + "sentences": [ + { + "sentence": "The drain is clogged with hair. the _ has to be cleaned.", + "answer1": [ + "hair" + ], + "answer0": [ + "drain" + ], + "correct_answer": [ + "drain" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "drain", + 0.10474126785993576 + ] + ], + "score": 1 + } + ] + }, + { + "index": 57, + "sentences": [ + { + "sentence": "The drain is clogged with hair. the _ has to be removed.", + "answer1": [ + "hair" + ], + "answer0": [ + "drain" + ], + "correct_answer": [ + "hair" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "hair", + 0.35679975152015686 + ], + [ + "drain", + 0.06430690735578537 + ] + ], + "score": 1 + } + ] + }, + { + "index": 58, + "sentences": [ + { + "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. Luckily, the _ was short, so it worked out.", + "answer1": [ + "train" + ], + "answer0": [ + "meeting" + ], + "correct_answer": [ + "meeting" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "train", + 0.2332829087972641 + ] + ], + "score": 0 + } + ] + }, + { + "index": 59, + "sentences": [ + { + "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. 
Luckily, the _ was delayed, so it worked out.", + "answer1": [ + "train" + ], + "answer0": [ + "meeting" + ], + "correct_answer": [ + "train" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "meeting", + 0.574749231338501 + ], + [ + "train", + 0.2624363899230957 + ] + ], + "score": 0 + } + ] + }, + { + "index": 60, + "sentences": [ + { + "sentence": "There is a pillar between me and the stage, and I can't see around the _ .", + "answer1": [ + "stage" + ], + "answer0": [ + "pillar" + ], + "correct_answer": [ + "pillar" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "pillar", + 0.5279375910758972 + ] + ], + "score": 1 + } + ] + }, + { + "index": 61, + "sentences": [ + { + "sentence": "There is a pillar between me and the stage, and I can't see the _ .", + "answer1": [ + "stage" + ], + "answer0": [ + "pillar" + ], + "correct_answer": [ + "stage" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "stage", + 0.07283679395914078 + ] + ], + "score": 1 + } + ] + }, + { + "index": 62, + "sentences": [ + { + "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear the _ .", + "answer1": [ + "subway" + ], + "answer0": [ + "announcement" + ], + "correct_answer": [ + "announcement" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 63, + "sentences": [ + { + "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear over the _ .", + "answer1": [ + "subway" + ], + "answer0": [ + "announcement" + ], + "correct_answer": [ + "subway" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 64, + "sentences": [ + { + "sentence": "In the middle of the outdoor concert, the rain started falling, but the _ continued until 10.", + "answer1": [ + "rain" + ], + "answer0": [ + "concert" + ], + "correct_answer": [ + "concert" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "rain", + 0.3882569670677185 + ], + [ + "concert", + 0.0404825396835804 + ] + ], + "score": 0 + } + ] + }, + { + "index": 65, + "sentences": [ + { + "sentence": "In the middle of the outdoor concert, the rain started falling, and the _ continued until 10.", + "answer1": [ + "rain" + ], + "answer0": [ + "concert" + ], + "correct_answer": [ + "rain" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "rain", + 0.22699223458766937 + ], + [ + "concert", + 0.07272675633430481 + ] + ], + "score": 1 + } + ] + }, + { + "index": 66, + "sentences": [ + { + "sentence": "I used an old rag to clean the knife, and then I put the _ in the trash.", + "answer1": [ + "knife" + ], + "answer0": [ + "rag" + ], + "correct_answer": [ + "rag" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "knife", + 0.7708435654640198 + ] + ], + "score": 0 + } + ] + }, + { + "index": 67, + "sentences": [ + { + "sentence": "I used an old rag to clean the knife, and then I put the _ in the drawer.", + "answer1": [ + "knife" + ], + "answer0": [ + "rag" + ], + "correct_answer": [ + "knife" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "knife", + 0.5724927186965942 + ] + ], + "score": 1 + } + ] + }, + { + "index": 68, + "sentences": [ + { + "sentence": "Warren asked Sandra what time the library closes, because _ had forgotten.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Warren", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4140302836894989 + ], + [ + "she", + 0.08503930270671844 
+ ], + [ + "warren", + 0.03423245623707771 + ] + ], + "score": 1 + }, + { + "sentence": "Emma asked Thomas what time the library closes, because _ had forgotten.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Emma", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.42612558603286743 + ], + [ + "thomas", + 0.15514008700847626 + ], + [ + "she", + 0.15142855048179626 + ] + ], + "score": 0 + } + ] + }, + { + "index": 69, + "sentences": [ + { + "sentence": "Zack asked Barbara what time the library closes, but _ had forgotten.", + "answer1": [ + "Barbara", + "she" + ], + "answer0": [ + "Zack", + "he" + ], + "correct_answer": [ + "Barbara", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.691577672958374 + ], + [ + "barbara", + 0.11628566682338715 + ], + [ + "he", + 0.09651367366313934 + ] + ], + "score": 1 + }, + { + "sentence": "Sarah asked Joseph what time the library closes, but _ had forgotten.", + "answer1": [ + "Joseph", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "Joseph", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5978205800056458 + ], + [ + "joseph", + 0.30730757117271423 + ], + [ + "she", + 0.0433766171336174 + ] + ], + "score": 1 + } + ] + }, + { + "index": 70, + "sentences": [ + { + "sentence": "I took the bottle out of the backpack so that the _ would be handy.", + "answer1": [ + "backpack" + ], + "answer0": [ + "bottle" + ], + "correct_answer": [ + "bottle" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "bottle", + 0.2538537085056305 + ] + ], + "score": 1 + } + ] + }, + { + "index": 71, + "sentences": [ + { + "sentence": "I took the bottle out of the backpack so that the _ would be lighter.", + "answer1": [ + "backpack" + ], + "answer0": [ + "bottle" + ], + "correct_answer": [ + "backpack" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "bottle", + 0.28798893094062805 + ] + ], + "score": 0 + } + ] + }, + { + "index": 72, + "sentences": [ + { + "sentence": "I couldn't put the pot on the shelf because the _ was too tall.", + "answer1": [ + "shelf" + ], + "answer0": [ + "pot" + ], + "correct_answer": [ + "pot" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "shelf", + 0.05657235160470009 + ], + [ + "pot", + 0.040349528193473816 + ] + ], + "score": 0 + } + ] + }, + { + "index": 73, + "sentences": [ + { + "sentence": "I couldn't put the pot on the shelf because the _ was too high.", + "answer1": [ + "shelf" + ], + "answer0": [ + "pot" + ], + "correct_answer": [ + "shelf" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "shelf", + 0.09407572448253632 + ] + ], + "score": 1 + } + ] + }, + { + "index": 74, + "sentences": [ + { + "sentence": "I'm sure that my map will show this building; the _ is very good.", + "answer1": [ + "building" + ], + "answer0": [ + "map" + ], + "correct_answer": [ + "map" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "map", + 0.2211541384458542 + ] + ], + "score": 1 + } + ] + }, + { + "index": 75, + "sentences": [ + { + "sentence": "I'm sure that my map will show this building; the _ is very famous.", + "answer1": [ + "building" + ], + "answer0": [ + "map" + ], + "correct_answer": [ + "building" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "building", + 0.08651192486286163 + ] + ], + "score": 1 + } + ] + }, + { + "index": 76, + "sentences": [ + { + "sentence": "Sandra paid for Tim's college education. 
_ is very generous.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Sandra", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "sandra", + 0.5636617541313171 + ], + [ + "she", + 0.3318640887737274 + ], + [ + "he", + 0.016400793567299843 + ] + ], + "score": 1 + }, + { + "sentence": "George paid for Laura's college education. _ is very generous.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "George", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "george", + 0.43826061487197876 + ], + [ + "he", + 0.2907955050468445 + ], + [ + "she", + 0.09768392145633698 + ], + [ + "laura", + 0.026110105216503143 + ] + ], + "score": 1 + } + ] + }, + { + "index": 77, + "sentences": [ + { + "sentence": "Laura paid for Tim's college education. _ is very grateful.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Laura", + "she" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.3613061308860779 + ], + [ + "laura", + 0.2712564170360565 + ], + [ + "tim", + 0.10081641376018524 + ], + [ + "he", + 0.09770604968070984 + ] + ], + "score": 0 + }, + { + "sentence": "George paid for Emma's college education. _ is very grateful.", + "answer1": [ + "Emma", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "Emma", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "emma", + 0.5662250518798828 + ], + [ + "she", + 0.16104619204998016 + ], + [ + "he", + 0.07634122669696808 + ], + [ + "george", + 0.037439413368701935 + ] + ], + "score": 1 + } + ] + }, + { + "index": 78, + "sentences": [ + { + "sentence": "Mandy paid for Steven's college education, but now Steven acts as though it never happened. _ is very hurt.", + "answer1": [ + "Steven", + "he" + ], + "answer0": [ + "Mandy", + "she" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "mandy", + 0.4452275037765503 + ], + [ + "steven", + 0.20133669674396515 + ], + [ + "he", + 0.04494181647896767 + ], + [ + "she", + 0.010965993627905846 + ] + ], + "score": 1 + }, + { + "sentence": "Edward paid for Sandra's college education, but now Sandra acts as though it never happened. _ is very hurt.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Edward", + "he" + ], + "correct_answer": [ + "Edward", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "sandra", + 0.4786634147167206 + ], + [ + "edward", + 0.3283962309360504 + ], + [ + "she", + 0.05131130293011665 + ], + [ + "he", + 0.004538827110081911 + ] + ], + "score": 0 + } + ] + }, + { + "index": 79, + "sentences": [ + { + "sentence": "David paid for Sue's college education, but now Sue acts as though it never happened. _ is very ungrateful.", + "answer1": [ + "Sue", + "she" + ], + "answer0": [ + "David", + "he" + ], + "correct_answer": [ + "Sue", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "sue", + 0.333128422498703 + ], + [ + "david", + 0.2768923044204712 + ], + [ + "she", + 0.11641097813844681 + ], + [ + "he", + 0.029392829164862633 + ] + ], + "score": 1 + }, + { + "sentence": "Emma paid for Tim's college education, but now Tim acts as though it never happened. 
_ is very ungrateful.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "tim", + 0.312907338142395 + ], + [ + "emma", + 0.26578396558761597 + ], + [ + "he", + 0.20920135080814362 + ], + [ + "she", + 0.02565154619514942 + ] + ], + "score": 1 + } + ] + }, + { + "index": 80, + "sentences": [ + { + "sentence": "Lisa was playing cards with Joseph and was way ahead. If Joseph hadn't had a sudden run of good luck, _ would have won.", + "answer1": [ + "Joseph", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4793993830680847 + ], + [ + "she", + 0.16311348974704742 + ], + [ + "lisa", + 0.1321823000907898 + ], + [ + "joseph", + 0.017298907041549683 + ] + ], + "score": 0 + }, + { + "sentence": "John was playing cards with Amy and was way ahead. If Amy hadn't had a sudden run of good luck, _ would have won.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "John", + "he" + ], + "correct_answer": [ + "John", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.48962944746017456 + ], + [ + "john", + 0.1949043720960617 + ], + [ + "he", + 0.1438501924276352 + ] + ], + "score": 0 + } + ] + }, + { + "index": 81, + "sentences": [ + { + "sentence": "Charles was playing cards with Betty and was way ahead. If Betty hadn't had a sudden run of good luck, _ would have lost.", + "answer1": [ + "Betty", + "she" + ], + "answer0": [ + "Charles", + "he" + ], + "correct_answer": [ + "Betty", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.559443473815918 + ], + [ + "he", + 0.10665259510278702 + ], + [ + "charles", + 0.08594903349876404 + ] + ], + "score": 1 + }, + { + "sentence": "Sue was playing cards with Charles and was way ahead. If Charles hadn't had a sudden run of good luck, _ would have lost.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.651694118976593 + ], + [ + "sue", + 0.13403257727622986 + ], + [ + "she", + 0.06975802779197693 + ] + ], + "score": 1 + } + ] + }, + { + "index": 82, + "sentences": [ + { + "sentence": "David can't leave work here until Margaret arrives to replace him. If Margaret had left home for work on time, _ would be gone by this time.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "David", + "he" + ], + "correct_answer": [ + "David", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "david", + 0.4532376229763031 + ], + [ + "she", + 0.3844938278198242 + ], + [ + "he", + 0.09111058712005615 + ] + ], + "score": 1 + }, + { + "sentence": "Linda can't leave work here until Thomas arrives to replace her. If Thomas had left home for work on time, _ would be gone by this time.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Linda", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.46403148770332336 + ], + [ + "linda", + 0.46336105465888977 + ], + [ + "she", + 0.0296529158949852 + ] + ], + "score": 0 + } + ] + }, + { + "index": 83, + "sentences": [ + { + "sentence": "David can't leave work here until Sandra arrives to replace him. 
If Sandra had left home for work on time, _ would be here by this time.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "David", + "he" + ], + "correct_answer": [ + "Sandra", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "david", + 0.4930424094200134 + ], + [ + "she", + 0.34252968430519104 + ], + [ + "he", + 0.06929971277713776 + ], + [ + "sandra", + 0.008408436551690102 + ] + ], + "score": 0 + }, + { + "sentence": "Linda can't leave work here until Tony arrives to replace her. If Tony had left home for work on time, _ would be here by this time.", + "answer1": [ + "Tony", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "linda", + 0.7818251252174377 + ], + [ + "he", + 0.1578403264284134 + ], + [ + "she", + 0.016681527718901634 + ], + [ + "tony", + 0.0018600979819893837 + ] + ], + "score": 0 + } + ] + }, + { + "index": 84, + "sentences": [ + { + "sentence": "If the woman has succeeded in fooling Sam, _ would have gotten a lot of money.", + "answer1": [ + "he" + ], + "answer0": [ + "she" + ], + "correct_answer": [ + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.66971355676651 + ], + [ + "he", + 0.14182616770267487 + ] + ], + "score": 1 + } + ] + }, + { + "index": 85, + "sentences": [ + { + "sentence": "If the woman has succeeded in fooling Sam, _ would have lost a lot of money.", + "answer1": [ + "he" + ], + "answer0": [ + "she" + ], + "correct_answer": [ + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.48567670583724976 + ], + [ + "he", + 0.2748803496360779 + ] + ], + "score": 0 + } + ] + }, + { + "index": 86, + "sentences": [] + }, + { + "index": 87, + "sentences": [] + }, + { + "index": 88, + "sentences": [ + { + "sentence": "The cat was lying by the mouse hole waiting for the mouse, but the _ was too impatient.", + "answer1": [ + "mouse" + ], + "answer0": [ + "cat" + ], + "correct_answer": [ + "cat" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "cat", + 0.8255143165588379 + ], + [ + "mouse", + 0.07212252169847488 + ] + ], + "score": 1 + } + ] + }, + { + "index": 89, + "sentences": [ + { + "sentence": "The cat was lying by the mouse hole waiting for the mouse, but the _ was too cautious.", + "answer1": [ + "mouse" + ], + "answer0": [ + "cat" + ], + "correct_answer": [ + "mouse" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "cat", + 0.7318105697631836 + ], + [ + "mouse", + 0.070926234126091 + ] + ], + "score": 0 + } + ] + }, + { + "index": 90, + "sentences": [ + { + "sentence": "Betty gave birth to a son Daniel last month. _ is a very charming woman.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Betty", + "she" + ], + "correct_answer": [ + "Betty", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "betty", + 0.5413283109664917 + ], + [ + "she", + 0.4113282859325409 + ], + [ + "daniel", + 0.0013550656149163842 + ] + ], + "score": 1 + } + ] + }, + { + "index": 91, + "sentences": [ + { + "sentence": "Laura gave birth to a son Jason last month. 
_ is a very charming baby.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Laura", + "she" + ], + "correct_answer": [ + "Jason", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "jason", + 0.593578040599823 + ], + [ + "he", + 0.34084823727607727 + ] + ], + "score": 1 + } + ] + }, + { + "index": 92, + "sentences": [ + { + "sentence": "Paul tried frantically to stop his daughter Sandra from chatting at the party, leaving us to wonder why _ was behaving so strangely.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Paul", + "he" + ], + "correct_answer": [ + "Paul", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.780010461807251 + ], + [ + "he", + 0.06307154893875122 + ], + [ + "sandra", + 0.03193315863609314 + ], + [ + "paul", + 0.02212286926805973 + ] + ], + "score": 0 + } + ] + }, + { + "index": 93, + "sentences": [ + { + "sentence": "Thomas tried frantically to stop his daughter Helen from barking at the party, leaving us to wonder why _ was behaving so strangely.", + "answer1": [ + "Helen", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Helen", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.5904063582420349 + ], + [ + "he", + 0.12378611415624619 + ], + [ + "helen", + 0.04352760314941406 + ], + [ + "thomas", + 0.04339068755507469 + ] + ], + "score": 1 + } + ] + }, + { + "index": 94, + "sentences": [] + }, + { + "index": 95, + "sentences": [] + }, + { + "index": 96, + "sentences": [ + { + "sentence": "The fish ate the worm. the _ was hungry.", + "answer1": [ + "worm" + ], + "answer0": [ + "fish" + ], + "correct_answer": [ + "fish" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "fish", + 0.37108519673347473 + ], + [ + "worm", + 0.21286211907863617 + ] + ], + "score": 1 + } + ] + }, + { + "index": 97, + "sentences": [ + { + "sentence": "The fish ate the worm. the _ was tasty.", + "answer1": [ + "worm" + ], + "answer0": [ + "fish" + ], + "correct_answer": [ + "worm" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "fish", + 0.44388917088508606 + ], + [ + "worm", + 0.06959007680416107 + ] + ], + "score": 0 + } + ] + }, + { + "index": 98, + "sentences": [ + { + "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with gum, and I couldn't get the _ in.", + "answer1": [ + "gum" + ], + "answer0": [ + "key" + ], + "correct_answer": [ + "key" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "key", + 0.8628509640693665 + ] + ], + "score": 1 + } + ] + }, + { + "index": 99, + "sentences": [ + { + "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with gum, and I couldn't get the _ out.", + "answer1": [ + "gum" + ], + "answer0": [ + "key" + ], + "correct_answer": [ + "gum" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "key", + 0.7175930142402649 + ], + [ + "gum", + 0.006937938742339611 + ] + ], + "score": 0 + } + ] + }, + { + "index": 100, + "sentences": [ + { + "sentence": "The dog chased the cat, which ran up a tree. the _ waited at the bottom.", + "answer1": [ + "cat" + ], + "answer0": [ + "dog" + ], + "correct_answer": [ + "dog" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "cat", + 0.21951650083065033 + ], + [ + "dog", + 0.17522823810577393 + ] + ], + "score": 0 + } + ] + }, + { + "index": 101, + "sentences": [ + { + "sentence": "The dog chased the cat, which ran up a tree. 
the _ waited at the top.", + "answer1": [ + "cat" + ], + "answer0": [ + "dog" + ], + "correct_answer": [ + "cat" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "cat", + 0.18947550654411316 + ], + [ + "dog", + 0.1664084941148758 + ] + ], + "score": 1 + } + ] + }, + { + "index": 102, + "sentences": [ + { + "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get the _ removed.", + "answer1": [ + "roof" + ], + "answer0": [ + "tree" + ], + "correct_answer": [ + "tree" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "tree", + 0.27868902683258057 + ] + ], + "score": 1 + } + ] + }, + { + "index": 103, + "sentences": [ + { + "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get the _ repaired.", + "answer1": [ + "roof" + ], + "answer0": [ + "tree" + ], + "correct_answer": [ + "roof" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "tree", + 0.5251078605651855 + ], + [ + "roof", + 0.1584283411502838 + ] + ], + "score": 0 + } + ] + }, + { + "index": 104, + "sentences": [ + { + "sentence": "The customer walked into the bank and stabbed the tellers. the _ was immediately taken to the police station.", + "answer1": [ + "teller" + ], + "answer0": [ + "customer" + ], + "correct_answer": [ + "customer" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "customer", + 0.5774991512298584 + ] + ], + "score": 1 + } + ] + }, + { + "index": 105, + "sentences": [ + { + "sentence": "The customer walked into the bank and stabbed the tellers. the _ was immediately taken to the hospital.", + "answer1": [ + "teller" + ], + "answer0": [ + "customer" + ], + "correct_answer": [ + "teller" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "customer", + 0.7133771777153015 + ] + ], + "score": 0 + } + ] + }, + { + "index": 106, + "sentences": [ + { + "sentence": "John was doing research in the library when he heard a woman humming and whistling. _ was very annoyed.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.6243001818656921 + ], + [ + "she", + 0.007973955012857914 + ] + ], + "score": 1 + } + ] + }, + { + "index": 107, + "sentences": [ + { + "sentence": "John was doing research in the library when he heard a woman humming and whistling. _ was very annoying.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.2894514203071594 + ] + ], + "score": 1 + } + ] + }, + { + "index": 108, + "sentences": [ + { + "sentence": "John was jogging through the park when he saw a woman juggling watermelons. _ was very impressed.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.6197863221168518 + ], + [ + "she", + 0.028509652242064476 + ] + ], + "score": 1 + } + ] + }, + { + "index": 109, + "sentences": [ + { + "sentence": "John was jogging through the park when he saw a woman juggling watermelons. _ was very impressive.", + "answer1": [ + "she" + ], + "answer0": [ + "he" + ], + "correct_answer": [ + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.7331407070159912 + ] + ], + "score": 1 + } + ] + }, + { + "index": 110, + "sentences": [ + { + "sentence": "George collapsed on the sidewalk. Soon he saw Wendy coming to help. 
_ was very ill.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "George", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.9086498618125916 + ], + [ + "wendy", + 0.07171561568975449 + ], + [ + "he", + 0.00464142020791769 + ] + ], + "score": 0 + }, + { + "sentence": "Nancy collapsed on the sidewalk. Soon she saw Brian coming to help. _ was very ill.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Nancy", + "she" + ], + "correct_answer": [ + "Nancy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8744707107543945 + ], + [ + "brian", + 0.09346448630094528 + ], + [ + "she", + 0.012748870067298412 + ] + ], + "score": 0 + } + ] + }, + { + "index": 111, + "sentences": [ + { + "sentence": "Steven collapsed on the sidewalk. Soon he saw Wendy coming to help. _ was very concerned.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Steven", + "he" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.669244647026062 + ], + [ + "wendy", + 0.28963056206703186 + ], + [ + "he", + 0.009827366098761559 + ] + ], + "score": 1 + }, + { + "sentence": "Betty collapsed on the sidewalk. Soon she saw Daniel coming to help. _ was very concerned.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Betty", + "she" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.7216044664382935 + ], + [ + "daniel", + 0.1329873502254486 + ], + [ + "she", + 0.07653984427452087 + ], + [ + "betty", + 0.025548135861754417 + ] + ], + "score": 1 + } + ] + }, + { + "index": 112, + "sentences": [] + }, + { + "index": 113, + "sentences": [] + }, + { + "index": 114, + "sentences": [ + { + "sentence": "Thomas told Margaret many lies about himself, which Margaret included in her book. _ should have been more truthful.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "thomas", + 0.30783167481422424 + ], + [ + "he", + 0.2996409833431244 + ], + [ + "she", + 0.12280318886041641 + ], + [ + "margaret", + 0.025127053260803223 + ] + ], + "score": 1 + }, + { + "sentence": "Lisa told William many lies about herself, which William included in his book. _ should have been more truthful.", + "answer1": [ + "William", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.4036584794521332 + ], + [ + "he", + 0.16349658370018005 + ], + [ + "lisa", + 0.10439618676900864 + ] + ], + "score": 1 + } + ] + }, + { + "index": 115, + "sentences": [ + { + "sentence": "Lisa told Edward many lies about herself, which Edward included in his book. _ should have been more skeptical.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "Edward", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.19039444625377655 + ], + [ + "she", + 0.08063376694917679 + ], + [ + "edward", + 0.07449514418840408 + ], + [ + "lisa", + 0.016218291595578194 + ] + ], + "score": 1 + }, + { + "sentence": "Daniel told Vivian many lies about himself, which Vivian included in her book. 
_ should have been more skeptical.", + "answer1": [ + "Vivian", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Vivian", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.13308386504650116 + ], + [ + "he", + 0.02119334787130356 + ], + [ + "vivian", + 0.006614150945097208 + ] + ], + "score": 1 + } + ] + }, + { + "index": 116, + "sentences": [ + { + "sentence": "Joe has sold his house and bought a new one a few miles away. He will be moving out of the _ house on Thursday.", + "answer1": [ + "new" + ], + "answer0": [ + "old" + ], + "correct_answer": [ + "old" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "old", + 0.5963925123214722 + ], + [ + "new", + 0.08838000893592834 + ] + ], + "score": 1 + } + ] + }, + { + "index": 117, + "sentences": [ + { + "sentence": "Joe has sold his house and bought a new one a few miles away. He will be moving into the _ house on Thursday.", + "answer1": [ + "new" + ], + "answer0": [ + "old" + ], + "correct_answer": [ + "new" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "new", + 0.8057393431663513 + ], + [ + "old", + 0.06867457181215286 + ] + ], + "score": 1 + } + ] + }, + { + "index": 118, + "sentences": [] + }, + { + "index": 119, + "sentences": [] + }, + { + "index": 120, + "sentences": [ + { + "sentence": "Mary took out her flute and played one of her favorite pieces. She has had the _ since she was a child.", + "answer1": [ + "piece" + ], + "answer0": [ + "flute" + ], + "correct_answer": [ + "flute" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "flute", + 0.8020106554031372 + ], + [ + "piece", + 0.0065211085602641106 + ] + ], + "score": 1 + } + ] + }, + { + "index": 121, + "sentences": [ + { + "sentence": "Mary took out her flute and played one of her favorite pieces. 
She has loved the _ since she was a child.", + "answer1": [ + "piece" + ], + "answer0": [ + "flute" + ], + "correct_answer": [ + "piece" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "flute", + 0.2698434591293335 + ], + [ + "piece", + 0.11442672461271286 + ] + ], + "score": 0 + } + ] + }, + { + "index": 122, + "sentences": [ + { + "sentence": "Sam pulled up a chair to the piano, but the _ was broken, so he had to stand instead.", + "answer1": [ + "piano" + ], + "answer0": [ + "chair" + ], + "correct_answer": [ + "chair" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "chair", + 0.2117273360490799 + ], + [ + "piano", + 0.0348532609641552 + ] + ], + "score": 1 + } + ] + }, + { + "index": 123, + "sentences": [ + { + "sentence": "Sam pulled up a chair to the piano, but the _ was broken, so he had to sing instead.", + "answer1": [ + "piano" + ], + "answer0": [ + "chair" + ], + "correct_answer": [ + "piano" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "piano", + 0.14480875432491302 + ], + [ + "chair", + 0.08749102801084518 + ] + ], + "score": 1 + } + ] + }, + { + "index": 124, + "sentences": [ + { + "sentence": "Since it was raining, I carried the newspaper in my backpack to keep the _ dry.", + "answer1": [ + "backpack" + ], + "answer0": [ + "newspaper" + ], + "correct_answer": [ + "newspaper" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 125, + "sentences": [ + { + "sentence": "Since it was raining, I carried the newspaper over my backpack to keep the _ dry.", + "answer1": [ + "backpack" + ], + "answer0": [ + "newspaper" + ], + "correct_answer": [ + "backpack" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 126, + "sentences": [ + { + "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. She reads the _ when she gets home from work.", + "answer1": [ + "article" + ], + "answer0": [ + "book" + ], + "correct_answer": [ + "book" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "article", + 0.7929431796073914 + ], + [ + "book", + 0.03670453652739525 + ] + ], + "score": 0 + } + ] + }, + { + "index": 127, + "sentences": [ + { + "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. 
She writes the _ when she gets home from work.", + "answer1": [ + "article" + ], + "answer0": [ + "book" + ], + "correct_answer": [ + "article" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "article", + 0.6547066569328308 + ], + [ + "book", + 0.049485430121421814 + ] + ], + "score": 1 + } + ] + }, + { + "index": 128, + "sentences": [ + { + "sentence": "This morning, Joey built a castle on the beach, and put a toy flag in the highest tower, but this afternoon the tide knocked the _ down.", + "answer1": [ + "flag" + ], + "answer0": [ + "castle" + ], + "correct_answer": [ + "castle" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "castle", + 0.3126690089702606 + ] + ], + "score": 1 + } + ] + }, + { + "index": 129, + "sentences": [ + { + "sentence": "This morning, Joey built a castle on the beach, and put a toy flag in the highest tower, but this afternoon the wind knocked the _ down.", + "answer1": [ + "flag" + ], + "answer0": [ + "castle" + ], + "correct_answer": [ + "flag" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "castle", + 0.14584790170192719 + ], + [ + "flag", + 0.0925380289554596 + ] + ], + "score": 0 + } + ] + }, + { + "index": 130, + "sentences": [ + { + "sentence": "Donna knocked on George's door, but there was no answer. _ was disappointed.", + "answer1": [ + "George", + "he" + ], + "answer0": [ + "Donna", + "she" + ], + "correct_answer": [ + "Donna", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.725192666053772 + ], + [ + "donna", + 0.21122732758522034 + ], + [ + "he", + 0.00751360272988677 + ], + [ + "george", + 0.002388492226600647 + ] + ], + "score": 1 + }, + { + "sentence": "Brian knocked on Sandra's door, but there was no answer. _ was disappointed.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Brian", + "he" + ], + "correct_answer": [ + "Brian", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.7136160135269165 + ], + [ + "brian", + 0.2146453559398651 + ], + [ + "she", + 0.03952759504318237 + ], + [ + "sandra", + 0.0034710802137851715 + ] + ], + "score": 1 + } + ] + }, + { + "index": 131, + "sentences": [ + { + "sentence": "Anthony knocked on Laura's door, but there was no answer. _ was out.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.3706660866737366 + ], + [ + "laura", + 0.23670922219753265 + ], + [ + "anthony", + 0.03675343096256256 + ], + [ + "he", + 0.03366513177752495 + ] + ], + "score": 1 + }, + { + "sentence": "Sarah knocked on David's door, but there was no answer. _ was out.", + "answer1": [ + "David", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "David", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.40272143483161926 + ], + [ + "david", + 0.22155331075191498 + ], + [ + "sarah", + 0.08200158923864365 + ], + [ + "she", + 0.025603963062167168 + ] + ], + "score": 1 + } + ] + }, + { + "index": 132, + "sentences": [ + { + "sentence": "David knocked on the door, and Helen answered it. 
_ invited her to come out.", + "answer1": [ + "Helen", + "she" + ], + "answer0": [ + "David", + "he" + ], + "correct_answer": [ + "David", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4498973488807678 + ], + [ + "david", + 0.4179869294166565 + ], + [ + "she", + 0.009854797273874283 + ], + [ + "helen", + 0.007285397034138441 + ] + ], + "score": 1 + }, + { + "sentence": "Vivian knocked on the door, and Daniel answered it. _ invited him to come out.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Vivian", + "she" + ], + "correct_answer": [ + "Vivian", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "vivian", + 0.45176881551742554 + ], + [ + "she", + 0.10893049836158752 + ] + ], + "score": 1 + } + ] + }, + { + "index": 133, + "sentences": [ + { + "sentence": "Jennifer knocked on the door, and Jason answered it. _ invited her to come in.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Jennifer", + "she" + ], + "correct_answer": [ + "Jason", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8066680431365967 + ], + [ + "jason", + 0.05348130315542221 + ], + [ + "jennifer", + 0.04620116949081421 + ] + ], + "score": 1 + }, + { + "sentence": "Joseph knocked on the door, and Amy answered it. _ invited him to come in.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "Joseph", + "he" + ], + "correct_answer": [ + "Amy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.7397094368934631 + ], + [ + "amy", + 0.09842552989721298 + ], + [ + "joseph", + 0.02913120575249195 + ], + [ + "he", + 0.011561447754502296 + ] + ], + "score": 1 + } + ] + }, + { + "index": 134, + "sentences": [ + { + "sentence": "Daniel took French classes from Anna, because _ was eager to speak it fluently.", + "answer1": [ + "Anna", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.543056845664978 + ], + [ + "she", + 0.28391432762145996 + ], + [ + "anna", + 0.1334146112203598 + ], + [ + "daniel", + 0.004943103063851595 + ] + ], + "score": 1 + }, + { + "sentence": "Susan took French classes from Edward, because _ was eager to speak it fluently.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Susan", + "she" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.6854918003082275 + ], + [ + "he", + 0.16495540738105774 + ], + [ + "edward", + 0.040629129856824875 + ], + [ + "susan", + 0.005385664291679859 + ] + ], + "score": 1 + } + ] + }, + { + "index": 135, + "sentences": [ + { + "sentence": "Daniel took French classes from Nancy, because _ was known to speak it fluently.", + "answer1": [ + "Nancy", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Nancy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.9642737507820129 + ], + [ + "she", + 0.03015826642513275 + ], + [ + "daniel", + 0.0008225942146964371 + ], + [ + "nancy", + 0.00018977400031872094 + ] + ], + "score": 0 + }, + { + "sentence": "Amy took French classes from Anthony, because _ was known to speak it fluently.", + "answer1": [ + "Anthony", + "he" + ], + "answer0": [ + "Amy", + "she" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.4824458956718445 + ], + [ + "he", + 0.4505062699317932 + ], + [ + "anthony", + 0.016775252297520638 + ], + [ + "amy", + 
0.00974208302795887 + ] + ], + "score": 0 + } + ] + }, + { + "index": 136, + "sentences": [ + { + "sentence": "The path to the lake was blocked, so we couldn't use the _ .", + "answer1": [ + "lake" + ], + "answer0": [ + "path" + ], + "correct_answer": [ + "path" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 137, + "sentences": [ + { + "sentence": "The path to the lake was blocked, so we couldn't reach the _ .", + "answer1": [ + "lake" + ], + "answer0": [ + "path" + ], + "correct_answer": [ + "lake" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "lake", + 0.08149494975805283 + ] + ], + "score": 1 + } + ] + }, + { + "index": 138, + "sentences": [ + { + "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, the _ was out.", + "answer1": [ + "cloud" + ], + "answer0": [ + "sun" + ], + "correct_answer": [ + "sun" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "sun", + 0.5122198462486267 + ] + ], + "score": 1 + } + ] + }, + { + "index": 139, + "sentences": [ + { + "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, the _ was gone.", + "answer1": [ + "cloud" + ], + "answer0": [ + "sun" + ], + "correct_answer": [ + "cloud" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "cloud", + 0.3546683192253113 + ], + [ + "sun", + 0.04581490159034729 + ] + ], + "score": 1 + } + ] + }, + { + "index": 140, + "sentences": [ + { + "sentence": "We went to the lake, because a shark had been seen at the beach, so the _ was a safer place to swim.", + "answer1": [ + "beach" + ], + "answer0": [ + "lake" + ], + "correct_answer": [ + "lake" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "lake", + 0.5537887811660767 + ], + [ + "beach", + 0.12069465965032578 + ] + ], + "score": 1 + } + ] + }, + { + "index": 141, + "sentences": [ + { + "sentence": "We went to the lake, because a shark had been seen at the beach, so the _ was a dangerous place to swim.", + "answer1": [ + "beach" + ], + "answer0": [ + "lake" + ], + "correct_answer": [ + "beach" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "lake", + 0.7313575148582458 + ], + [ + "beach", + 0.04965415969491005 + ] + ], + "score": 0 + } + ] + }, + { + "index": 142, + "sentences": [ + { + "sentence": "Sam tried to paint a picture of tourists with sheep, but the _ ended up looking more like golfers.", + "answer1": [ + "sheep" + ], + "answer0": [ + "tourists" + ], + "correct_answer": [ + "tourists" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "sheep", + 0.48821017146110535 + ] + ], + "score": 0 + } + ] + }, + { + "index": 143, + "sentences": [ + { + "sentence": "Sam tried to paint a picture of tourists with sheep, but the _ ended up looking more like dogs.", + "answer1": [ + "sheep" + ], + "answer0": [ + "tourists" + ], + "correct_answer": [ + "sheep" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "sheep", + 0.6432616710662842 + ] + ], + "score": 1 + } + ] + }, + { + "index": 144, + "sentences": [ + { + "sentence": "Margaret tucked her son Brian into bed, so that _ could work.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.24223224818706512 + ], + [ + "he", + 0.22981160879135132 + ], + [ + "margaret", + 0.022159870713949203 + ] + ], + "score": 1 + } + ] + }, + { + "index": 145, + "sentences": [ + { + 
"sentence": "Wendy tucked her son Paul into bed, so that _ could sleep.", + "answer1": [ + "Paul", + "he" + ], + "answer0": [ + "Wendy", + "she" + ], + "correct_answer": [ + "Paul", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8820493817329407 + ], + [ + "paul", + 0.020039597526192665 + ], + [ + "she", + 0.0034701882395893335 + ] + ], + "score": 1 + } + ] + }, + { + "index": 146, + "sentences": [] + }, + { + "index": 147, + "sentences": [] + }, + { + "index": 148, + "sentences": [ + { + "sentence": "Sarah visited George's grave in 1765. At that date _ had been travelling for five years.", + "answer1": [ + "George", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "Sarah", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.5088940262794495 + ], + [ + "he", + 0.20025378465652466 + ], + [ + "george", + 0.07865126430988312 + ], + [ + "sarah", + 0.0399525985121727 + ] + ], + "score": 1 + }, + { + "sentence": "Steven visited Helen's grave in 1765. At that date _ had been travelling for five years.", + "answer1": [ + "Helen", + "she" + ], + "answer0": [ + "Steven", + "he" + ], + "correct_answer": [ + "Steven", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5841214060783386 + ], + [ + "she", + 0.15333987772464752 + ], + [ + "steven", + 0.12337502837181091 + ], + [ + "helen", + 0.00795214157551527 + ] + ], + "score": 1 + } + ] + }, + { + "index": 149, + "sentences": [ + { + "sentence": "Paul visited Margaret's grave in 1765. At that date _ had been dead for five years.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Paul", + "he" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8265026211738586 + ], + [ + "margaret", + 0.11342242360115051 + ], + [ + "he", + 0.023298852145671844 + ], + [ + "paul", + 0.0028004718478769064 + ] + ], + "score": 1 + }, + { + "sentence": "Sandra visited Zack's grave in 1765. 
At that date _ had been dead for five years.", + "answer1": [ + "Zack", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Zack", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5330711007118225 + ], + [ + "zack", + 0.4511242210865021 + ], + [ + "she", + 0.0025638267397880554 + ] + ], + "score": 1 + } + ] + }, + { + "index": 150, + "sentences": [ + { + "sentence": "Vivian was greatly influenced by Steven, though _ lived two centuries later.", + "answer1": [ + "Steven", + "he" + ], + "answer0": [ + "Vivian", + "she" + ], + "correct_answer": [ + "Vivian", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.6800739765167236 + ], + [ + "she", + 0.0814879834651947 + ], + [ + "steven", + 0.0334942601621151 + ], + [ + "vivian", + 0.00919678620994091 + ] + ], + "score": 0 + }, + { + "sentence": "Anthony was greatly influenced by Sandra, though _ lived two centuries later.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.5291610956192017 + ], + [ + "he", + 0.24366435408592224 + ], + [ + "anthony", + 0.07931230962276459 + ], + [ + "sandra", + 0.014960510656237602 + ] + ], + "score": 0 + } + ] + }, + { + "index": 151, + "sentences": [ + { + "sentence": "Warren was greatly influenced by Mandy, though _ lived two centuries earlier.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.6331958770751953 + ], + [ + "he", + 0.14606720209121704 + ], + [ + "warren", + 0.010313055478036404 + ] + ], + "score": 1 + }, + { + "sentence": "Lisa was greatly influenced by David, though _ lived two centuries earlier.", + "answer1": [ + "David", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "David", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6384512782096863 + ], + [ + "she", + 0.16565637290477753 + ], + [ + "david", + 0.0723688080906868 + ], + [ + "lisa", + 0.006370751652866602 + ] + ], + "score": 1 + } + ] + }, + { + "index": 152, + "sentences": [ + { + "sentence": "I can't cut that tree down with that axe; the _ is too thick.", + "answer1": [ + "axe" + ], + "answer0": [ + "tree" + ], + "correct_answer": [ + "tree" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "tree", + 0.025083722546696663 + ] + ], + "score": 1 + } + ] + }, + { + "index": 153, + "sentences": [ + { + "sentence": "I can't cut that tree down with that axe; the _ is too small.", + "answer1": [ + "axe" + ], + "answer0": [ + "tree" + ], + "correct_answer": [ + "axe" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "tree", + 0.4419287145137787 + ] + ], + "score": 0 + } + ] + }, + { + "index": 154, + "sentences": [ + { + "sentence": "The foxes are getting in at night and attacking the chickens. I shall have to kill the _ .", + "answer1": [ + "chickens" + ], + "answer0": [ + "foxes" + ], + "correct_answer": [ + "foxes" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "chickens", + 0.22372716665267944 + ], + [ + "foxes", + 0.1705324798822403 + ] + ], + "score": 0 + } + ] + }, + { + "index": 155, + "sentences": [ + { + "sentence": "The foxes are getting in at night and attacking the chickens. 
I shall have to guard the _ .", + "answer1": [ + "chickens" + ], + "answer0": [ + "foxes" + ], + "correct_answer": [ + "chickens" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "chickens", + 0.3232119679450989 + ] + ], + "score": 1 + } + ] + }, + { + "index": 156, + "sentences": [ + { + "sentence": "The foxes are getting in at night and attacking the chickens. the _ have gotten very bold.", + "answer1": [ + "chickens" + ], + "answer0": [ + "foxes" + ], + "correct_answer": [ + "foxes" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "foxes", + 0.8538748025894165 + ] + ], + "score": 1 + } + ] + }, + { + "index": 157, + "sentences": [ + { + "sentence": "The foxes are getting in at night and attacking the chickens. the _ have gotten very nervous.", + "answer1": [ + "chickens" + ], + "answer0": [ + "foxes" + ], + "correct_answer": [ + "chickens" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "foxes", + 0.24852731823921204 + ], + [ + "chickens", + 0.1355128437280655 + ] + ], + "score": 0 + } + ] + }, + { + "index": 158, + "sentences": [ + { + "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. He opened his _ when the wind stopped.", + "answer1": [ + "hands" + ], + "answer0": [ + "eyes" + ], + "correct_answer": [ + "eyes" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "eyes", + 0.9919928312301636 + ] + ], + "score": 1 + } + ] + }, + { + "index": 159, + "sentences": [ + { + "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. He lowered his _ when the wind stopped.", + "answer1": [ + "hands" + ], + "answer0": [ + "eyes" + ], + "correct_answer": [ + "hands" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "eyes", + 0.07590554654598236 + ], + [ + "hands", + 0.04639185965061188 + ] + ], + "score": 0 + } + ] + }, + { + "index": 160, + "sentences": [ + { + "sentence": "The actress used to be named Christina, but she changed it to Tina a few years ago, because she figured the _ was too hard to pronounce.", + "answer1": [ + "Tina" + ], + "answer0": [ + "Christina" + ], + "correct_answer": [ + "Christina" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 161, + "sentences": [ + { + "sentence": "The actress used to be named Christina, but she changed it to Tina a few years ago, because she figured the _ was easier to pronounce.", + "answer1": [ + "Tina" + ], + "answer0": [ + "Christina" + ], + "correct_answer": [ + "Tina" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 162, + "sentences": [ + { + "sentence": "George watched TV while Amy went out to buy groceries. After an hour _ got up.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "George", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "george", + 0.4222693145275116 + ], + [ + "she", + 0.17023402452468872 + ], + [ + "he", + 0.14838194847106934 + ], + [ + "amy", + 0.09143561124801636 + ] + ], + "score": 1 + }, + { + "sentence": "Helen watched TV while Paul went out to buy groceries. 
After an hour _ got up.", + "answer1": [ + "Paul", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Helen", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "helen", + 0.3572254478931427 + ], + [ + "she", + 0.18161025643348694 + ], + [ + "he", + 0.17783011496067047 + ], + [ + "paul", + 0.12632130086421967 + ] + ], + "score": 1 + } + ] + }, + { + "index": 163, + "sentences": [ + { + "sentence": "Sue watched TV while Richard went out to buy groceries. After an hour _ got back.", + "answer1": [ + "Richard", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Richard", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.36730772256851196 + ], + [ + "richard", + 0.24268296360969543 + ], + [ + "she", + 0.0852513387799263 + ], + [ + "sue", + 0.0603502094745636 + ] + ], + "score": 1 + }, + { + "sentence": "Steven watched TV while Lisa went out to buy groceries. After an hour _ got back.", + "answer1": [ + "Lisa", + "she" + ], + "answer0": [ + "Steven", + "he" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.2513202726840973 + ], + [ + "lisa", + 0.21226151287555695 + ], + [ + "he", + 0.15152567625045776 + ], + [ + "steven", + 0.06623411923646927 + ] + ], + "score": 1 + } + ] + }, + { + "index": 164, + "sentences": [ + { + "sentence": "Fred was supposed to run the oven, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned the _ on.", + "answer1": [ + "TV" + ], + "answer0": [ + "oven" + ], + "correct_answer": [ + "oven" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "oven", + 0.33234283328056335 + ], + [ + "tv", + 0.24399203062057495 + ] + ], + "score": 1 + } + ] + }, + { + "index": 165, + "sentences": [ + { + "sentence": "Fred was supposed to run the oven, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned the _ off.", + "answer1": [ + "TV" + ], + "answer0": [ + "oven" + ], + "correct_answer": [ + "TV" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "oven", + 0.5511050224304199 + ], + [ + "tv", + 0.14227940142154694 + ] + ], + "score": 0 + } + ] + }, + { + "index": 166, + "sentences": [ + { + "sentence": "Tony is the only man still alive who remembers my great-grandmother Jennifer. _ is remarkable.", + "answer1": [ + "Jennifer", + "she" + ], + "answer0": [ + "Tony", + "he" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.31883949041366577 + ], + [ + "he", + 0.2727121412754059 + ] + ], + "score": 0 + } + ] + }, + { + "index": 167, + "sentences": [ + { + "sentence": "Tony is the only man still alive who remembers my great-grandmother Mandy. _ was remarkable.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + "Tony", + "he" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.7409354448318481 + ], + [ + "mandy", + 0.02154599316418171 + ], + [ + "he", + 0.008621525950729847 + ] + ], + "score": 1 + } + ] + }, + { + "index": 168, + "sentences": [ + { + "sentence": "Daniel is the only man alive who still remembers my mother Wendy as an infant. 
When Daniel first saw my mother, _ was twelve years old.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.721801221370697 + ], + [ + "he", + 0.14972272515296936 + ], + [ + "daniel", + 0.004093638621270657 + ], + [ + "wendy", + 0.003174440236762166 + ] + ], + "score": 0 + } + ] + }, + { + "index": 169, + "sentences": [ + { + "sentence": "Paul is the only man alive who still remembers my mother Lisa as an infant. When Paul first saw my mother, _ was twelve months old.", + "answer1": [ + "Lisa", + "she" + ], + "answer0": [ + "Paul", + "he" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8270816802978516 + ], + [ + "he", + 0.06970749795436859 + ], + [ + "paul", + 0.006479764357209206 + ], + [ + "lisa", + 0.003468103241175413 + ] + ], + "score": 1 + } + ] + }, + { + "index": 170, + "sentences": [ + { + "sentence": "In July, Spain declared war on Italy. Since Italy's army was much better equipped and ten times larger, the _ were defeated within weeks.", + "answer1": [ + "Italy" + ], + "answer0": [ + "Spain" + ], + "correct_answer": [ + "Spain" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 171, + "sentences": [ + { + "sentence": "In July, France declared war on Russia. Since Russia's army was much better equipped and ten times larger, the _ were victorious within weeks.", + "answer1": [ + "Russia" + ], + "answer0": [ + "France" + ], + "correct_answer": [ + "Russia" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 172, + "sentences": [ + { + "sentence": "Look! There is a fish swimming right below that duck! the _ had better get away to safety fast!", + "answer1": [ + "duck" + ], + "answer0": [ + "fish" + ], + "correct_answer": [ + "fish" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "fish", + 0.7521501183509827 + ], + [ + "duck", + 0.09160765260457993 + ] + ], + "score": 1 + } + ] + }, + { + "index": 173, + "sentences": [ + { + "sentence": "Look! There is a shark swimming right below that duck! the _ had better get away to safety fast!", + "answer1": [ + "duck" + ], + "answer0": [ + "shark" + ], + "correct_answer": [ + "duck" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "shark", + 0.39164668321609497 + ], + [ + "duck", + 0.049340344965457916 + ] + ], + "score": 0 + } + ] + }, + { + "index": 174, + "sentences": [] + }, + { + "index": 175, + "sentences": [] + }, + { + "index": 176, + "sentences": [ + { + "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. the _ began two years ago.", + "answer1": [ + "fish" + ], + "answer0": [ + "scientists" + ], + "correct_answer": [ + "scientists" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 177, + "sentences": [ + { + "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. the _ appeared two years ago.", + "answer1": [ + "fish" + ], + "answer0": [ + "scientists" + ], + "correct_answer": [ + "fish" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "fish", + 0.014301100745797157 + ] + ], + "score": 1 + } + ] + }, + { + "index": 178, + "sentences": [ + { + "sentence": "The journalists interviewed the stars of the new movie. 
the _ were very persistent, so the interview lasted for a long time.", + "answer1": [ + "stars" + ], + "answer0": [ + "journalists" + ], + "correct_answer": [ + "journalists" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "journalists", + 0.06059985235333443 + ], + [ + "stars", + 0.04318194463849068 + ] + ], + "score": 1 + } + ] + }, + { + "index": 179, + "sentences": [ + { + "sentence": "The journalists interviewed the stars of the new movie. the _ were very cooperative, so the interview lasted for a long time.", + "answer1": [ + "stars" + ], + "answer0": [ + "journalists" + ], + "correct_answer": [ + "stars" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "journalists", + 0.06477633118629456 + ] + ], + "score": 0 + } + ] + }, + { + "index": 180, + "sentences": [ + { + "sentence": "The police arrested all of the students. the _ were trying to stop the drug trade in the neighborhood.", + "answer1": [ + "students" + ], + "answer0": [ + "police" + ], + "correct_answer": [ + "police" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "police", + 0.8427444100379944 + ], + [ + "students", + 0.08074833452701569 + ] + ], + "score": 1 + } + ] + }, + { + "index": 181, + "sentences": [ + { + "sentence": "The police arrested all of the students. the _ were trying to run the drug trade in the neighborhood.", + "answer1": [ + "students" + ], + "answer0": [ + "police" + ], + "correct_answer": [ + "students" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "police", + 0.5480837821960449 + ], + [ + "students", + 0.25696301460266113 + ] + ], + "score": 0 + } + ] + }, + { + "index": 182, + "sentences": [ + { + "sentence": "I put the cake away in the refrigerator. the _ has a lot of butter in it.", + "answer1": [ + "refrigerator" + ], + "answer0": [ + "cake" + ], + "correct_answer": [ + "cake" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "cake", + 0.3875311017036438 + ] + ], + "score": 1 + } + ] + }, + { + "index": 183, + "sentences": [ + { + "sentence": "I put the cake away in the refrigerator. the _ has a lot of leftovers in it.", + "answer1": [ + "refrigerator" + ], + "answer0": [ + "cake" + ], + "correct_answer": [ + "refrigerator" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "refrigerator", + 0.04797044396400452 + ] + ], + "score": 1 + } + ] + }, + { + "index": 184, + "sentences": [ + { + "sentence": "Sam broke both his ankles and he's walking with cane. But a month or so from now the _ should be better.", + "answer1": [ + "cane" + ], + "answer0": [ + "ankles" + ], + "correct_answer": [ + "ankles" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 185, + "sentences": [ + { + "sentence": "Sam broke both his ankles and he's walking with cane. But a month or so from now the _ should be unnecessary.", + "answer1": [ + "cane" + ], + "answer0": [ + "ankles" + ], + "correct_answer": [ + "cane" + ], + "adjacent_ref": true, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 186, + "sentences": [ + { + "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. 
the _ were very much in the minority.", + "answer1": [ + "opponents" + ], + "answer0": [ + "sponsors" + ], + "correct_answer": [ + "sponsors" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "opponents", + 0.1925417184829712 + ] + ], + "score": 0 + } + ] + }, + { + "index": 187, + "sentences": [ + { + "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. the _ were very much in the majority.", + "answer1": [ + "opponents" + ], + "answer0": [ + "sponsors" + ], + "correct_answer": [ + "opponents" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "opponents", + 0.2790956199169159 + ] + ], + "score": 1 + } + ] + }, + { + "index": 188, + "sentences": [ + { + "sentence": "Everyone really loved the cake; only a few people liked the cookies. Next time, we should make more of the _ .", + "answer1": [ + "cookies" + ], + "answer0": [ + "cake" + ], + "correct_answer": [ + "cake" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "cookies", + 0.34039416909217834 + ], + [ + "cake", + 0.1966356337070465 + ] + ], + "score": 0 + } + ] + }, + { + "index": 189, + "sentences": [ + { + "sentence": "Everyone really loved the cake; only a few people liked the cookies. Next time, we should make fewer of the _ .", + "answer1": [ + "cookies" + ], + "answer0": [ + "cake" + ], + "correct_answer": [ + "cookies" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "cookies", + 0.7632879018783569 + ] + ], + "score": 1 + } + ] + }, + { + "index": 190, + "sentences": [ + { + "sentence": "We had hoped to place books on all the chairs in the auditorium, but there were simply not enough of the _ .", + "answer1": [ + "chairs" + ], + "answer0": [ + "books" + ], + "correct_answer": [ + "books" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "books", + 0.7032318115234375 + ], + [ + "chairs", + 0.02748037874698639 + ] + ], + "score": 1 + } + ] + }, + { + "index": 191, + "sentences": [ + { + "sentence": "We had hoped to place books on all the chairs in the auditorium, but there were simply too many of the _ .", + "answer1": [ + "chairs" + ], + "answer0": [ + "books" + ], + "correct_answer": [ + "chairs" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "chairs", + 0.31335213780403137 + ], + [ + "books", + 0.28027427196502686 + ] + ], + "score": 1 + } + ] + }, + { + "index": 192, + "sentences": [ + { + "sentence": "I stuck a pin through a carrot. When I pulled the pin out, the _ left a hole.", + "answer1": [ + "carrot" + ], + "answer0": [ + "pin" + ], + "correct_answer": [ + "pin" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "pin", + 0.24921917915344238 + ], + [ + "carrot", + 0.02873871475458145 + ] + ], + "score": 1 + } + ] + }, + { + "index": 193, + "sentences": [ + { + "sentence": "I stuck a pin through a carrot. When I pulled the pin out, the _ had a hole.", + "answer1": [ + "carrot" + ], + "answer0": [ + "pin" + ], + "correct_answer": [ + "carrot" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "carrot", + 0.7155035734176636 + ], + [ + "pin", + 0.04343157261610031 + ] + ], + "score": 1 + } + ] + }, + { + "index": 194, + "sentences": [ + { + "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. 
But that turned out to be a bad idea, because the _ got full of coffee.", + "answer1": [ + "coffee" + ], + "answer0": [ + "pen" + ], + "correct_answer": [ + "pen" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 195, + "sentences": [ + { + "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. But that turned out to be a bad idea, because the _ got full of ink.", + "answer1": [ + "coffee" + ], + "answer0": [ + "pen" + ], + "correct_answer": [ + "coffee" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "pen", + 0.07221017777919769 + ] + ], + "score": 0 + } + ] + }, + { + "index": 196, + "sentences": [ + { + "sentence": "Alan follows Emma's example in everything. _ admires her hugely.", + "answer1": [ + "Emma", + "she" + ], + "answer0": [ + "Alan", + "he" + ], + "correct_answer": [ + "Alan", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.44828981161117554 + ], + [ + "alan", + 0.3871767222881317 + ], + [ + "emma", + 0.0232782494276762 + ], + [ + "she", + 0.01241056714206934 + ] + ], + "score": 1 + }, + { + "sentence": "Donna follows Jason's example in everything. _ admires him hugely.", + "answer1": [ + "Jason", + "he" + ], + "answer0": [ + "Donna", + "she" + ], + "correct_answer": [ + "Donna", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.9227645397186279 + ], + [ + "donna", + 0.03128309175372124 + ], + [ + "he", + 0.003405218245461583 + ] + ], + "score": 1 + } + ] + }, + { + "index": 197, + "sentences": [ + { + "sentence": "Vivian follows Thomas's example in everything. _ influences her hugely.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Vivian", + "she" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6024318933486938 + ], + [ + "thomas", + 0.21580076217651367 + ], + [ + "vivian", + 0.005271642003208399 + ] + ], + "score": 1 + }, + { + "sentence": "Richard follows Nancy's example in everything. _ influences him hugely.", + "answer1": [ + "Nancy", + "she" + ], + "answer0": [ + "Richard", + "he" + ], + "correct_answer": [ + "Nancy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "nancy", + 0.7275211215019226 + ], + [ + "she", + 0.23386697471141815 + ] + ], + "score": 1 + } + ] + }, + { + "index": 198, + "sentences": [ + { + "sentence": "The table won't fit through the doorway because the _ is too wide.", + "answer1": [ + "doorway" + ], + "answer0": [ + "table" + ], + "correct_answer": [ + "table" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "table", + 0.07816523313522339 + ] + ], + "score": 1 + } + ] + }, + { + "index": 199, + "sentences": [ + { + "sentence": "The table won't fit through the doorway because the _ is too narrow.", + "answer1": [ + "doorway" + ], + "answer0": [ + "table" + ], + "correct_answer": [ + "doorway" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "table", + 0.06917010992765427 + ] + ], + "score": 0 + } + ] + }, + { + "index": 200, + "sentences": [ + { + "sentence": "Grace was happy to trade me her sweater for my jacket. 
She thinks the _ looks dowdy on her.", + "answer1": [ + "jacket" + ], + "answer0": [ + "sweater" + ], + "correct_answer": [ + "sweater" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "sweater", + 0.2597585618495941 + ], + [ + "jacket", + 0.2361963987350464 + ] + ], + "score": 1 + } + ] + }, + { + "index": 201, + "sentences": [ + { + "sentence": "Grace was happy to trade me her sweater for my jacket. She thinks the _ looks great on her.", + "answer1": [ + "jacket" + ], + "answer0": [ + "sweater" + ], + "correct_answer": [ + "jacket" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "sweater", + 0.29831069707870483 + ], + [ + "jacket", + 0.158711239695549 + ] + ], + "score": 0 + } + ] + }, + { + "index": 202, + "sentences": [ + { + "sentence": "Helen hired Jason to take care of _ .", + "answer1": [ + "Jason", + "him" + ], + "answer0": [ + "Helen", + "her" + ], + "correct_answer": [ + "Helen", + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.3049941062927246 + ], + [ + "him", + 0.028502047061920166 + ] + ], + "score": 1 + }, + { + "sentence": "Tim hired Lisa to take care of _ .", + "answer1": [ + "Lisa", + "her" + ], + "answer0": [ + "Tim", + "him" + ], + "correct_answer": [ + "Tim", + "him" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "him", + 0.21020644903182983 + ], + [ + "her", + 0.04846358299255371 + ] + ], + "score": 1 + } + ] + }, + { + "index": 203, + "sentences": [ + { + "sentence": "Emma hired herself out to Anthony to take care of _ .", + "answer1": [ + "Anthony", + "him" + ], + "answer0": [ + "Emma", + "her" + ], + "correct_answer": [ + "Anthony", + "him" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "him", + 0.17034678161144257 + ], + [ + "her", + 0.08058315515518188 + ] + ], + "score": 1 + }, + { + "sentence": "Daniel hired himself out to Emma to take care of _ .", + "answer1": [ + "Emma", + "her" + ], + "answer0": [ + "Daniel", + "him" + ], + "correct_answer": [ + "Emma", + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.21644917130470276 + ], + [ + "him", + 0.043456241488456726 + ] + ], + "score": 1 + } + ] + }, + { + "index": 204, + "sentences": [ + { + "sentence": "Sandra promised Edward to leave, so an hour later _ left.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Sandra", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.4670681357383728 + ], + [ + "she", + 0.18557576835155487 + ], + [ + "edward", + 0.14714860916137695 + ] + ], + "score": 0 + }, + { + "sentence": "Thomas promised Betty to leave, so an hour later _ left.", + "answer1": [ + "Betty", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.47173383831977844 + ], + [ + "betty", + 0.22963103652000427 + ], + [ + "he", + 0.14014899730682373 + ], + [ + "thomas", + 0.0091405613347888 + ] + ], + "score": 0 + } + ] + }, + { + "index": 205, + "sentences": [ + { + "sentence": "William ordered Mandy to leave, so an hour later _ left.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + "William", + "he" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.2989388108253479 + ], + [ + "he", + 0.23829956352710724 + ], + [ + "mandy", + 0.20623540878295898 + ] + ], + "score": 1 + }, + { + "sentence": "Amy ordered Warren to leave, so an hour later _ left.", + "answer1": [ + 
"Warren", + "he" + ], + "answer0": [ + "Amy", + "she" + ], + "correct_answer": [ + "Warren", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.4589509963989258 + ], + [ + "warren", + 0.33637410402297974 + ], + [ + "she", + 0.05308223143219948 + ] + ], + "score": 1 + } + ] + }, + { + "index": 206, + "sentences": [ + { + "sentence": "Thomas's biography of Emma conveys a vivid sense of the difficulties _ faced in his research.", + "answer1": [ + "Emma", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.38616475462913513 + ], + [ + "she", + 0.19750751554965973 + ], + [ + "emma", + 0.14758290350437164 + ], + [ + "thomas", + 0.08441972732543945 + ] + ], + "score": 1 + }, + { + "sentence": "Jennifer's biography of George conveys a vivid sense of the difficulties _ faced in her research.", + "answer1": [ + "George", + "he" + ], + "answer0": [ + "Jennifer", + "she" + ], + "correct_answer": [ + "Jennifer", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.7952712178230286 + ], + [ + "he", + 0.025763899087905884 + ], + [ + "george", + 0.025489602237939835 + ], + [ + "jennifer", + 0.00788148120045662 + ] + ], + "score": 1 + } + ] + }, + { + "index": 207, + "sentences": [ + { + "sentence": "Emma's biography of Alan conveys a vivid sense of the difficulties _ faced in his childhood.", + "answer1": [ + "Alan", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Alan", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "alan", + 0.5420495271682739 + ], + [ + "he", + 0.4210248291492462 + ] + ], + "score": 1 + }, + { + "sentence": "Steven's biography of Margaret conveys a vivid sense of the difficulties _ faced in her childhood.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Steven", + "he" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.6349072456359863 + ], + [ + "margaret", + 0.29832860827445984 + ] + ], + "score": 1 + } + ] + }, + { + "index": 208, + "sentences": [ + { + "sentence": "Wendy's father Anthony had died long ago, and _ education had been managed by an excellent woman as governess.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.9348182082176208 + ], + [ + "his", + 0.011819146573543549 + ] + ], + "score": 1 + } + ] + }, + { + "index": 209, + "sentences": [ + { + "sentence": "Sandra's father Tim had died long ago, and _ place had been taken by an excellent woman as governess.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.3821287453174591 + ], + [ + "her", + 0.32249218225479126 + ] + ], + "score": 1 + } + ] + }, + { + "index": 210, + "sentences": [ + { + "sentence": "Anthony knocked on Betty's door but _ did not get an answer.", + "answer1": [ + "Betty", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.9353936910629272 + ], + [ + "she", + 0.014528683386743069 + ], + [ + "anthony", + 0.0021569491364061832 + ] + ], + "score": 1 + }, + { + "sentence": "Betty knocked on Thomas's door but _ did not get an answer.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Betty", + "she" + ], + 
"correct_answer": [ + "Betty", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.8701193928718567 + ], + [ + "he", + 0.04268043488264084 + ], + [ + "thomas", + 0.004556684289127588 + ] + ], + "score": 1 + } + ] + }, + { + "index": 211, + "sentences": [ + { + "sentence": "John knocked on Susan's door but _ did not answer.", + "answer1": [ + "Susan", + "she" + ], + "answer0": [ + "John", + "he" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9418527483940125 + ], + [ + "susan", + 0.05125227943062782 + ], + [ + "he", + 0.0006008930504322052 + ] + ], + "score": 1 + }, + { + "sentence": "Emma knocked on Charles's door but _ did not answer.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.9730058908462524 + ], + [ + "charles", + 0.02500162087380886 + ] + ], + "score": 1 + } + ] + }, + { + "index": 212, + "sentences": [ + { + "sentence": "Zack paid the detective Susan after _ received the final report on the case.", + "answer1": [ + "Susan", + "she" + ], + "answer0": [ + "Zack", + "he" + ], + "correct_answer": [ + "Zack", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.6660929918289185 + ], + [ + "he", + 0.2357940971851349 + ], + [ + "zack", + 0.012386161834001541 + ], + [ + "susan", + 0.004307710099965334 + ] + ], + "score": 0 + }, + { + "sentence": "Anna paid the detective Daniel after _ received the final report on the case.", + "answer1": [ + "Daniel", + "he" + ], + "answer0": [ + "Anna", + "she" + ], + "correct_answer": [ + "Anna", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.6424099206924438 + ], + [ + "she", + 0.2567521929740906 + ], + [ + "anna", + 0.010955804027616978 + ] + ], + "score": 0 + } + ] + }, + { + "index": 213, + "sentences": [ + { + "sentence": "Brian paid the detective Margaret after _ delivered the final report on the case.", + "answer1": [ + "Margaret", + "she" + ], + "answer0": [ + "Brian", + "he" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8936680555343628 + ], + [ + "he", + 0.036272548139095306 + ], + [ + "margaret", + 0.009478574618697166 + ], + [ + "brian", + 0.005012798588722944 + ] + ], + "score": 1 + }, + { + "sentence": "Helen paid the detective Warren after _ delivered the final report on the case.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Warren", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.759939432144165 + ], + [ + "she", + 0.13307538628578186 + ], + [ + "warren", + 0.012190349400043488 + ], + [ + "helen", + 0.008577443659305573 + ] + ], + "score": 1 + } + ] + }, + { + "index": 214, + "sentences": [ + { + "sentence": "Lisa didn't get angry with Thomas, who had cut her off, because _ stopped and counted to ten.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Lisa", + "she" + ], + "correct_answer": [ + "Lisa", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.7895767092704773 + ], + [ + "he", + 0.11457893252372742 + ], + [ + "lisa", + 0.03027874045073986 + ] + ], + "score": 1 + }, + { + "sentence": "Edward didn't get angry with Mandy, who had cut his off, because _ stopped and counted to ten.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + "Edward", + "he" + ], + 
"correct_answer": [ + "Edward", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8440133929252625 + ], + [ + "she", + 0.03659467399120331 + ], + [ + "edward", + 0.018313562497496605 + ] + ], + "score": 1 + } + ] + }, + { + "index": 215, + "sentences": [ + { + "sentence": "Jason didn't get angry with Linda, who had cut his off, because _ stopped and apologized.", + "answer1": [ + "Linda", + "she" + ], + "answer0": [ + "Jason", + "he" + ], + "correct_answer": [ + "Linda", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.4764364957809448 + ], + [ + "she", + 0.37486517429351807 + ], + [ + "linda", + 0.0429224893450737 + ], + [ + "jason", + 0.030886851251125336 + ] + ], + "score": 0 + }, + { + "sentence": "Sue didn't get angry with Brian, who had cut her off, because _ stopped and apologized.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Brian", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5377354621887207 + ], + [ + "she", + 0.31687992811203003 + ], + [ + "brian", + 0.037937115877866745 + ], + [ + "sue", + 0.014928928576409817 + ] + ], + "score": 1 + } + ] + }, + { + "index": 216, + "sentences": [ + { + "sentence": "William signaled Susan and gestured toward _ empty glass", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.274255633354187 + ], + [ + "her", + 0.10120938718318939 + ] + ], + "score": 1 + }, + { + "sentence": "Sue signaled Thomas and gestured toward _ empty glass", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.28291237354278564 + ], + [ + "her", + 0.14910736680030823 + ] + ], + "score": 0 + } + ] + }, + { + "index": 217, + "sentences": [ + { + "sentence": "David signaled Emma and gestured toward _ bathroom key.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.08032070845365524 + ], + [ + "her", + 0.03398525342345238 + ] + ], + "score": 0 + }, + { + "sentence": "Amy signaled Anthony and gestured toward _ bathroom key.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.095477394759655 + ], + [ + "his", + 0.02509940415620804 + ] + ], + "score": 0 + } + ] + }, + { + "index": 218, + "sentences": [ + { + "sentence": "Mandy took the rear seat while Joseph claimed the front because _ \"Dibs!\" was slow.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.4607595205307007 + ], + [ + "her", + 0.11643649637699127 + ] + ], + "score": 0 + }, + { + "sentence": "Charles took the rear seat while Helen claimed the front because _ \"Dibs!\" was slow.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.4100673198699951 + ], + [ + "his", + 0.139830082654953 + ] + ], + "score": 0 + } + ] + }, + { + "index": 219, + "sentences": [ + { + "sentence": "Jason took the rear seat while Vivian claimed the front because _ \"Dibs!\" was quicker.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": 
true, + "predict_answer": [ + [ + "her", + 0.30036041140556335 + ], + [ + "his", + 0.1332821547985077 + ] + ], + "score": 1 + }, + { + "sentence": "Margaret took the rear seat while Tim claimed the front because _ \"Dibs!\" was quicker.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.3551889657974243 + ], + [ + "her", + 0.10693569481372833 + ] + ], + "score": 1 + } + ] + }, + { + "index": 220, + "sentences": [ + { + "sentence": "Mandy said \"Check\" to David as she moved _ bishop.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + }, + { + "sentence": "David said \"Check\" to Donna as he moved _ bishop.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 221, + "sentences": [ + { + "sentence": "Margaret said \"Check\" to Steven as she took _ bishop.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.045481689274311066 + ], + [ + "her", + 0.04472532123327255 + ] + ], + "score": 1 + }, + { + "sentence": "Anthony said \"Check\" to Sarah as he took _ bishop.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.04094506800174713 + ] + ], + "score": 0 + } + ] + }, + { + "index": 222, + "sentences": [ + { + "sentence": "As Sandra in the crop duster passed over Edward, _ could see the landing strip.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Sandra", + "she" + ], + "correct_answer": [ + "Sandra", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.23987726867198944 + ], + [ + "she", + 0.19285492599010468 + ], + [ + "edward", + 0.07019305974245071 + ] + ], + "score": 0 + }, + { + "sentence": "As George in the crop duster passed over Lisa, _ could see the landing strip.", + "answer1": [ + "Lisa", + "she" + ], + "answer0": [ + "George", + "he" + ], + "correct_answer": [ + "George", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "lisa", + 0.34604838490486145 + ], + [ + "she", + 0.21167317032814026 + ], + [ + "he", + 0.20678868889808655 + ] + ], + "score": 0 + } + ] + }, + { + "index": 223, + "sentences": [ + { + "sentence": "As Thomas in the crop duster passed over Susan, _ could see the landing gear.", + "answer1": [ + "Susan", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Susan", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.4135798215866089 + ], + [ + "she", + 0.15865713357925415 + ], + [ + "susan", + 0.1331108957529068 + ] + ], + "score": 0 + }, + { + "sentence": "As Jennifer in the crop duster passed over Thomas, _ could see the landing gear.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Jennifer", + "she" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.4583258628845215 + ], + [ + "she", + 0.15161868929862976 + ], + [ + "thomas", + 0.1475401371717453 + ] + ], + "score": 1 + } + ] + }, + { + "index": 224, + "sentences": [ + { + "sentence": "Richard gave Mandy a lift to school so _ wouldn't have to drive alone.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": 
[ + "Richard", + "he" + ], + "correct_answer": [ + "Richard", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.7928639054298401 + ], + [ + "he", + 0.06139402464032173 + ] + ], + "score": 0 + }, + { + "sentence": "Barbara gave Zack a lift to school so _ wouldn't have to drive alone.", + "answer1": [ + "Zack", + "he" + ], + "answer0": [ + "Barbara", + "she" + ], + "correct_answer": [ + "Barbara", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8156458735466003 + ], + [ + "zack", + 0.10156332701444626 + ], + [ + "she", + 0.025199728086590767 + ] + ], + "score": 0 + } + ] + }, + { + "index": 225, + "sentences": [ + { + "sentence": "Nancy gave Tony a lift to school so _ wouldn't have to walk.", + "answer1": [ + "Tony", + "he" + ], + "answer0": [ + "Nancy", + "she" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6183673143386841 + ], + [ + "tony", + 0.19011403620243073 + ] + ], + "score": 1 + }, + { + "sentence": "Edward gave Wendy a lift to school so _ wouldn't have to walk.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Edward", + "he" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8526315093040466 + ], + [ + "he", + 0.020969539880752563 + ] + ], + "score": 1 + } + ] + }, + { + "index": 226, + "sentences": [ + { + "sentence": "Margaret passed the half-empty plate to Anthony because _ was full.", + "answer1": [ + "Anthony", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.07039020210504532 + ], + [ + "she", + 0.0310561191290617 + ] + ], + "score": 0 + }, + { + "sentence": "Thomas passed the half-empty plate to Sarah because _ was full.", + "answer1": [ + "Sarah", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.05379188060760498 + ], + [ + "he", + 0.045447368174791336 + ] + ], + "score": 0 + } + ] + }, + { + "index": 227, + "sentences": [ + { + "sentence": "Helen passed the half-empty plate to Brian because _ was hungry.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Brian", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9241517782211304 + ], + [ + "he", + 0.06978235393762589 + ] + ], + "score": 0 + }, + { + "sentence": "Warren passed the half-empty plate to Jennifer because _ was hungry.", + "answer1": [ + "Jennifer", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Jennifer", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.6595138311386108 + ], + [ + "she", + 0.324116587638855 + ], + [ + "jennifer", + 0.0032959093805402517 + ] + ], + "score": 0 + } + ] + }, + { + "index": 228, + "sentences": [ + { + "sentence": "Nancy passed the gameboy to Brian because _ turn was over.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.26922306418418884 + ], + [ + "her", + 0.12208054959774017 + ] + ], + "score": 0 + }, + { + "sentence": "Paul passed the gameboy to Sandra because _ turn was over.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 
0.26440608501434326 + ], + [ + "his", + 0.13414087891578674 + ] + ], + "score": 0 + } + ] + }, + { + "index": 229, + "sentences": [ + { + "sentence": "Zack passed the gameboy to Mandy because _ turn was next.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.4155150055885315 + ], + [ + "his", + 0.2723301649093628 + ] + ], + "score": 1 + }, + { + "sentence": "Vivian passed the gameboy to Paul because _ turn was next.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.5038822889328003 + ], + [ + "her", + 0.15782448649406433 + ] + ], + "score": 1 + } + ] + }, + { + "index": 230, + "sentences": [ + { + "sentence": "The man lifted the girl onto _ shoulders.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.9872719645500183 + ], + [ + "her", + 0.007788238115608692 + ] + ], + "score": 1 + } + ] + }, + { + "index": 231, + "sentences": [ + { + "sentence": "The man lifted the girl onto _ bunk bed.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.052843790501356125 + ], + [ + "his", + 0.028196819126605988 + ] + ], + "score": 1 + } + ] + }, + { + "index": 232, + "sentences": [ + { + "sentence": "Stretching _ back, the woman smiled at the boy.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.9602349996566772 + ], + [ + "his", + 0.0329311229288578 + ] + ], + "score": 1 + } + ] + }, + { + "index": 233, + "sentences": [ + { + "sentence": "Patting _ back, the woman smiled at the boy.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.931027889251709 + ], + [ + "her", + 0.051477789878845215 + ] + ], + "score": 1 + } + ] + }, + { + "index": 234, + "sentences": [ + { + "sentence": "William cried because Wendy wouldn't accept _ toy.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.2313782274723053 + ], + [ + "her", + 0.04676021263003349 + ] + ], + "score": 1 + }, + { + "sentence": "Sandra cried because Alan wouldn't accept _ toy.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.2570785582065582 + ], + [ + "his", + 0.09622883796691895 + ] + ], + "score": 1 + } + ] + }, + { + "index": 235, + "sentences": [ + { + "sentence": "Thomas cried because Susan wouldn't share _ toy.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.28240013122558594 + ], + [ + "her", + 0.10179656744003296 + ] + ], + "score": 0 + }, + { + "sentence": "Vivian cried because George wouldn't share _ toy.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.25874245166778564 + ], + [ + "her", + 0.18479324877262115 + ] + ], + "score": 1 + } + ] + }, + { + "index": 236, + "sentences": [ + { + "sentence": 
"Edward spoke to Margaret, breaking _ silence.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.004641450475901365 + ], + [ + "his", + 0.00293560978025198 + ] + ], + "score": 0 + }, + { + "sentence": "Wendy spoke to Jason, breaking _ silence.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.0020169508643448353 + ], + [ + "her", + 0.0009562345803715289 + ] + ], + "score": 0 + } + ] + }, + { + "index": 237, + "sentences": [ + { + "sentence": "Tim spoke to Donna, breaking _ concentration.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.5251883268356323 + ], + [ + "his", + 0.34837156534194946 + ] + ], + "score": 1 + }, + { + "sentence": "Sue spoke to Richard, breaking _ concentration.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.7294524908065796 + ], + [ + "her", + 0.18369422852993011 + ] + ], + "score": 1 + } + ] + }, + { + "index": 238, + "sentences": [ + { + "sentence": "When Helen dropped his ice cream, David giggled, so father gave _ a sympathetic look.", + "answer1": [ + "David", + "him" + ], + "answer0": [ + "Helen", + "her" + ], + "correct_answer": [ + "Helen", + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.5764134526252747 + ], + [ + "him", + 0.25476816296577454 + ], + [ + "helen", + 0.02986416220664978 + ], + [ + "david", + 0.019353149458765984 + ] + ], + "score": 1 + }, + { + "sentence": "When Richard dropped his ice cream, Linda giggled, so father gave _ a sympathetic look.", + "answer1": [ + "Linda", + "her" + ], + "answer0": [ + "Richard", + "him" + ], + "correct_answer": [ + "Richard", + "him" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.8484610319137573 + ], + [ + "him", + 0.06817058473825455 + ], + [ + "linda", + 0.015336349606513977 + ] + ], + "score": 0 + } + ] + }, + { + "index": 239, + "sentences": [ + { + "sentence": "When Anna dropped his ice cream, Alan giggled, so father gave _ a stern look.", + "answer1": [ + "Alan", + "him" + ], + "answer0": [ + "Anna", + "her" + ], + "correct_answer": [ + "Alan", + "him" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.7019764184951782 + ], + [ + "him", + 0.139887273311615 + ], + [ + "anna", + 0.06648413836956024 + ], + [ + "alan", + 0.008537232875823975 + ] + ], + "score": 0 + }, + { + "sentence": "When Zack dropped his ice cream, Sue giggled, so father gave _ a stern look.", + "answer1": [ + "Sue", + "her" + ], + "answer0": [ + "Zack", + "him" + ], + "correct_answer": [ + "Sue", + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.7724469900131226 + ], + [ + "him", + 0.11578436195850372 + ], + [ + "zack", + 0.02440289407968521 + ] + ], + "score": 1 + } + ] + }, + { + "index": 240, + "sentences": [ + { + "sentence": "As Donna carried Jason up the long winding steps, _ legs ached.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.7337371110916138 + ], + [ + "his", + 0.2596256732940674 + ] + ], + "score": 1 + }, + { + "sentence": "As Warren carried Sarah up the long winding steps, _ legs ached.", + 
"answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.5732764601707458 + ], + [ + "her", + 0.4222981929779053 + ] + ], + "score": 1 + } + ] + }, + { + "index": 241, + "sentences": [ + { + "sentence": "As Helen carried George up the long winding steps, _ legs dangled.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.6082881689071655 + ], + [ + "her", + 0.37898463010787964 + ] + ], + "score": 1 + }, + { + "sentence": "As Tim carried Amy up the long winding steps, _ legs dangled.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.9631314277648926 + ], + [ + "his", + 0.011471277102828026 + ] + ], + "score": 1 + } + ] + }, + { + "index": 242, + "sentences": [ + { + "sentence": "The father carried the sleeping girl in _ arms", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "his", + 0.9978468418121338 + ], + [ + "her", + 0.000320874823955819 + ] + ], + "score": 1 + } + ] + }, + { + "index": 243, + "sentences": [ + { + "sentence": "The father carried the sleeping girl in _ bassinet.", + "answer1": [ + "her" + ], + "answer0": [ + "his" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.19996562600135803 + ], + [ + "her", + 0.04508880525827408 + ] + ], + "score": 0 + } + ] + }, + { + "index": 244, + "sentences": [ + { + "sentence": "The woman held the boy against _ chest", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.9982455968856812 + ], + [ + "his", + 0.00023115644580684602 + ] + ], + "score": 1 + } + ] + }, + { + "index": 245, + "sentences": [ + { + "sentence": "The woman held the boy against _ will.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "his", + 0.945933997631073 + ], + [ + "her", + 0.05019375681877136 + ] + ], + "score": 1 + } + ] + }, + { + "index": 246, + "sentences": [] + }, + { + "index": 247, + "sentences": [] + }, + { + "index": 248, + "sentences": [ + { + "sentence": "Amy informed Brian that _ had retired and presented several options for future treatment.", + "answer1": [ + "Brian", + "he" + ], + "answer0": [ + "Amy", + "she" + ], + "correct_answer": [ + "Amy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.37087374925613403 + ], + [ + "she", + 0.3327399492263794 + ], + [ + "amy", + 0.015426949597895145 + ] + ], + "score": 0 + }, + { + "sentence": "Tim informed Linda that _ had retired and presented several options for future treatment.", + "answer1": [ + "Linda", + "she" + ], + "answer0": [ + "Tim", + "he" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5563133358955383 + ], + [ + "she", + 0.13216964900493622 + ], + [ + "tim", + 0.018639085814356804 + ] + ], + "score": 1 + } + ] + }, + { + "index": 249, + "sentences": [ + { + "sentence": "Brian informed Sandra that _ had cancer and presented several options for future treatment.", + "answer1": [ + "Sandra", + "she" + ], + "answer0": [ + "Brian", + "he" + ], + "correct_answer": [ + 
"Sandra", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5297118425369263 + ], + [ + "she", + 0.2422800213098526 + ], + [ + "brian", + 0.005271288100630045 + ] + ], + "score": 0 + }, + { + "sentence": "Helen informed Paul that _ had cancer and presented several options for future treatment.", + "answer1": [ + "Paul", + "he" + ], + "answer0": [ + "Helen", + "she" + ], + "correct_answer": [ + "Paul", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.4575677514076233 + ], + [ + "he", + 0.31825608015060425 + ], + [ + "paul", + 0.009974068962037563 + ], + [ + "helen", + 0.007482711225748062 + ] + ], + "score": 0 + } + ] + }, + { + "index": 250, + "sentences": [ + { + "sentence": "Daniel had to stop Jennifer from toying with the injured bird. _ is very compassionate.", + "answer1": [ + "Jennifer", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "jennifer", + 0.2903973460197449 + ], + [ + "she", + 0.22931936383247375 + ], + [ + "he", + 0.16045786440372467 + ], + [ + "daniel", + 0.08556315302848816 + ] + ], + "score": 0 + }, + { + "sentence": "Sarah had to stop Tim from toying with the injured bird. _ is very compassionate.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "Sarah", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "tim", + 0.29738715291023254 + ], + [ + "sarah", + 0.274274617433548 + ], + [ + "he", + 0.18724749982357025 + ], + [ + "she", + 0.10372157394886017 + ] + ], + "score": 0 + } + ] + }, + { + "index": 251, + "sentences": [ + { + "sentence": "Linda had to stop Charles from toying with the injured bird. _ is very cruel.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "charles", + 0.752829909324646 + ], + [ + "he", + 0.14354054629802704 + ], + [ + "she", + 0.02448778599500656 + ] + ], + "score": 1 + }, + { + "sentence": "John had to stop Amy from toying with the injured bird. 
_ is very cruel.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "John", + "he" + ], + "correct_answer": [ + "Amy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.28404945135116577 + ], + [ + "amy", + 0.20439286530017853 + ], + [ + "john", + 0.15464074909687042 + ], + [ + "she", + 0.14667406678199768 + ] + ], + "score": 0 + } + ] + }, + { + "index": 252, + "sentences": [ + { + "sentence": "Sarah got free tickets to the play, but she gave them to Charles, even though _ was particularly eager to see it.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Sarah", + "she" + ], + "correct_answer": [ + "Sarah", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8693797588348389 + ], + [ + "she", + 0.06461245566606522 + ], + [ + "charles", + 0.048112157732248306 + ], + [ + "sarah", + 0.0034832460805773735 + ] + ], + "score": 0 + }, + { + "sentence": "Daniel got free tickets to the play, but he gave them to Amy, even though _ was particularly eager to see it.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "Daniel", + "he" + ], + "correct_answer": [ + "Daniel", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.8007415533065796 + ], + [ + "he", + 0.08351122587919235 + ], + [ + "amy", + 0.07764580100774765 + ], + [ + "daniel", + 0.0045530772767961025 + ] + ], + "score": 0 + } + ] + }, + { + "index": 253, + "sentences": [ + { + "sentence": "Paul got free tickets to the play, but he gave them to Laura, because _ was particularly eager to see it.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "Paul", + "he" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.9574117064476013 + ], + [ + "laura", + 0.026944546028971672 + ], + [ + "he", + 0.008284908719360828 + ], + [ + "paul", + 0.0004410938418004662 + ] + ], + "score": 1 + }, + { + "sentence": "Margaret got free tickets to the play, but she gave them to Thomas, because _ was particularly eager to see it.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.9332942366600037 + ], + [ + "thomas", + 0.03162537142634392 + ], + [ + "she", + 0.018393704667687416 + ], + [ + "margaret", + 0.0010122362291440368 + ] + ], + "score": 1 + } + ] + }, + { + "index": 254, + "sentences": [ + { + "sentence": "Steven got free tickets to the play, but he gave them to Nancy, because _ was not particularly eager to see it.", + "answer1": [ + "Nancy", + "she" + ], + "answer0": [ + "Steven", + "he" + ], + "correct_answer": [ + "Steven", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.9163604378700256 + ], + [ + "nancy", + 0.0480802021920681 + ], + [ + "he", + 0.021462248638272285 + ], + [ + "steven", + 0.0004176660440862179 + ] + ], + "score": 0 + }, + { + "sentence": "Margaret got free tickets to the play, but she gave them to Warren, because _ was not particularly eager to see it.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.8333559036254883 + ], + [ + "warren", + 0.0700656995177269 + ], + [ + "she", + 0.06793110817670822 + ], + [ + "margaret", + 0.0037946717347949743 + ] + ], + "score": 0 + } + ] + }, + { + "index": 255, + "sentences": [ + { + "sentence": "Emma gave Edward candy because _ 
wasn't hungry.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Emma", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.6763647794723511 + ], + [ + "she", + 0.26191577315330505 + ], + [ + "edward", + 0.023742079734802246 + ] + ], + "score": 0 + }, + { + "sentence": "Jason gave Betty candy because _ wasn't hungry.", + "answer1": [ + "Betty", + "she" + ], + "answer0": [ + "Jason", + "he" + ], + "correct_answer": [ + "Jason", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.75534987449646 + ], + [ + "he", + 0.1928008496761322 + ], + [ + "betty", + 0.03130156919360161 + ] + ], + "score": 0 + } + ] + }, + { + "index": 256, + "sentences": [ + { + "sentence": "Sue gave Tim candy because _ was hungry.", + "answer1": [ + "Tim", + "he" + ], + "answer0": [ + "Sue", + "she" + ], + "correct_answer": [ + "Tim", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.7316045165061951 + ], + [ + "she", + 0.22841434180736542 + ], + [ + "tim", + 0.025842225179076195 + ] + ], + "score": 1 + }, + { + "sentence": "Jason gave Jennifer candy because _ was hungry.", + "answer1": [ + "Jennifer", + "she" + ], + "answer0": [ + "Jason", + "he" + ], + "correct_answer": [ + "Jennifer", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5606254935264587 + ], + [ + "she", + 0.4133855998516083 + ], + [ + "jennifer", + 0.010734654031693935 + ], + [ + "jason", + 0.0010448332177475095 + ] + ], + "score": 0 + } + ] + }, + { + "index": 257, + "sentences": [ + { + "sentence": "I tried to paint a picture of an orchard, with apples in the trees, but the _ came out looking more like light bulbs.", + "answer1": [ + "trees" + ], + "answer0": [ + "apples" + ], + "correct_answer": [ + "apples" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "apples", + 0.5211644172668457 + ] + ], + "score": 1 + } + ] + }, + { + "index": 258, + "sentences": [ + { + "sentence": "I tried to paint a picture of an orchard, with apples in the trees, but the _ came out looking more like telephone poles.", + "answer1": [ + "trees" + ], + "answer0": [ + "apples" + ], + "correct_answer": [ + "trees" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "apples", + 0.3111271262168884 + ], + [ + "trees", + 0.0494505800306797 + ] + ], + "score": 0 + } + ] + }, + { + "index": 259, + "sentences": [ + { + "sentence": "Margaret asked Tony for a favor but _ was refused.", + "answer1": [ + "Tony", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.3978308141231537 + ], + [ + "tony", + 0.05781383812427521 + ], + [ + "she", + 0.05164272338151932 + ] + ], + "score": 0 + }, + { + "sentence": "Anthony asked Amy for a favor but _ was refused.", + "answer1": [ + "Amy", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Anthony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.4784584939479828 + ], + [ + "amy", + 0.06777182221412659 + ], + [ + "he", + 0.021794931963086128 + ] + ], + "score": 0 + } + ] + }, + { + "index": 260, + "sentences": [ + { + "sentence": "Emma asked Joseph for a favor but _ refused.", + "answer1": [ + "Joseph", + "he" + ], + "answer0": [ + "Emma", + "she" + ], + "correct_answer": [ + "Joseph", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "joseph", + 0.5529801249504089 + ], + [ + "he", + 0.43354299664497375 + 
], + [ + "she", + 0.0008864352712407708 + ] + ], + "score": 1 + }, + { + "sentence": "Warren asked Laura for a favor but _ refused.", + "answer1": [ + "Laura", + "she" + ], + "answer0": [ + "Warren", + "he" + ], + "correct_answer": [ + "Laura", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.8105427026748657 + ], + [ + "laura", + 0.17909826338291168 + ] + ], + "score": 1 + } + ] + }, + { + "index": 261, + "sentences": [ + { + "sentence": "Mandy ceded the presidency to Warren because _ was less popular.", + "answer1": [ + "Warren", + "he" + ], + "answer0": [ + "Mandy", + "she" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.5756611824035645 + ], + [ + "warren", + 0.18201638758182526 + ], + [ + "she", + 0.006012094207108021 + ] + ], + "score": 0 + }, + { + "sentence": "Brian ceded the presidency to Wendy because _ was less popular.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Brian", + "he" + ], + "correct_answer": [ + "Brian", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.708384096622467 + ], + [ + "he", + 0.1238713338971138 + ], + [ + "wendy", + 0.04093684256076813 + ], + [ + "brian", + 0.01032228209078312 + ] + ], + "score": 0 + } + ] + }, + { + "index": 262, + "sentences": [ + { + "sentence": "Betty ceded the presidency to Edward because _ was more popular.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Betty", + "she" + ], + "correct_answer": [ + "Edward", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.5475196242332458 + ], + [ + "she", + 0.19534984230995178 + ], + [ + "edward", + 0.11332083493471146 + ], + [ + "betty", + 0.011675244197249413 + ] + ], + "score": 1 + }, + { + "sentence": "Anthony ceded the presidency to Mandy because _ was more popular.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + "Anthony", + "he" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.7501657009124756 + ], + [ + "mandy", + 0.17399102449417114 + ], + [ + "he", + 0.03566601499915123 + ] + ], + "score": 1 + } + ] + }, + { + "index": 263, + "sentences": [ + { + "sentence": "Wendy did not pass the ball to Steven although _ saw that he was open.", + "answer1": [ + "Steven", + "he" + ], + "answer0": [ + "Wendy", + "she" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.8957375288009644 + ], + [ + "wendy", + 0.028812197968363762 + ], + [ + "he", + 0.014717331156134605 + ] + ], + "score": 1 + }, + { + "sentence": "Tony did not pass the ball to Anna although _ saw that she was open.", + "answer1": [ + "Anna", + "she" + ], + "answer0": [ + "Tony", + "he" + ], + "correct_answer": [ + "Tony", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.931627631187439 + ], + [ + "tony", + 0.03243102505803108 + ], + [ + "she", + 0.006165023893117905 + ] + ], + "score": 1 + } + ] + }, + { + "index": 264, + "sentences": [ + { + "sentence": "Linda did not pass the ball to Zack although _ was open.", + "answer1": [ + "Zack", + "he" + ], + "answer0": [ + "Linda", + "she" + ], + "correct_answer": [ + "Zack", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.1047290489077568 + ], + [ + "she", + 0.035551343113183975 + ] + ], + "score": 1 + }, + { + "sentence": "David did not pass the ball to Mandy although _ was open.", + "answer1": [ + "Mandy", + "she" + ], + "answer0": [ + 
"David", + "he" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.10433197766542435 + ], + [ + "he", + 0.09989483654499054 + ] + ], + "score": 1 + } + ] + }, + { + "index": 265, + "sentences": [ + { + "sentence": "I put the specimen on the table and the _ broke.", + "answer1": [ + "table" + ], + "answer0": [ + "specimen" + ], + "correct_answer": [ + "specimen" + ], + "adjacent_ref": false, + "predict_answer": [], + "score": 0 + } + ] + }, + { + "index": 266, + "sentences": [ + { + "sentence": "I put the stone on the table and the _ broke.", + "answer1": [ + "table" + ], + "answer0": [ + "stone" + ], + "correct_answer": [ + "table" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "stone", + 0.10115251690149307 + ] + ], + "score": 0 + } + ] + }, + { + "index": 267, + "sentences": [ + { + "sentence": "Margaret fired her trainer Edward because _ couldn't stand his friend.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Margaret", + "she" + ], + "correct_answer": [ + "Margaret", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.731697142124176 + ], + [ + "she", + 0.17442825436592102 + ], + [ + "edward", + 0.03614525496959686 + ], + [ + "margaret", + 0.0032051822636276484 + ] + ], + "score": 0 + } + ] + }, + { + "index": 268, + "sentences": [ + { + "sentence": "Jennifer fired her trainer Thomas because _ slept with her friend.", + "answer1": [ + "Thomas", + "he" + ], + "answer0": [ + "Jennifer", + "she" + ], + "correct_answer": [ + "Thomas", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.8891029357910156 + ], + [ + "she", + 0.055782753974199295 + ], + [ + "thomas", + 0.013204317539930344 + ], + [ + "jennifer", + 0.0024945021141320467 + ] + ], + "score": 1 + } + ] + }, + { + "index": 269, + "sentences": [ + { + "sentence": "Sarah fired her trainer John because he slept with _ friend.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "her" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "her", + 0.7042596936225891 + ], + [ + "his", + 0.0390113890171051 + ] + ], + "score": 1 + } + ] + }, + { + "index": 270, + "sentences": [ + { + "sentence": "Amy fired her trainer George because she couldn't stand _ friend.", + "answer1": [ + "his" + ], + "answer0": [ + "her" + ], + "correct_answer": [ + "his" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "her", + 0.8472513556480408 + ], + [ + "his", + 0.0945512130856514 + ] + ], + "score": 0 + } + ] + }, + { + "index": 271, + "sentences": [ + { + "sentence": "Paul believed that Sue suspected that _ had stolen the watch.", + "answer1": [ + "Sue", + "she" + ], + "answer0": [ + "Paul", + "he" + ], + "correct_answer": [ + "Paul", + "he" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "he", + 0.23854734003543854 + ], + [ + "she", + 0.1008545383810997 + ], + [ + "sue", + 0.07258161902427673 + ], + [ + "paul", + 0.06131039932370186 + ] + ], + "score": 1 + }, + { + "sentence": "Mandy believed that Edward suspected that _ had stolen the watch.", + "answer1": [ + "Edward", + "he" + ], + "answer0": [ + "Mandy", + "she" + ], + "correct_answer": [ + "Mandy", + "she" + ], + "adjacent_ref": false, + "predict_answer": [ + [ + "she", + 0.14960864186286926 + ], + [ + "he", + 0.12885059416294098 + ], + [ + "mandy", + 0.05671042948961258 + ], + [ + "edward", + 0.04502331465482712 + ] + ], + "score": 1 + } + ] + }, + { + "index": 272, + "sentences": [ + { + "sentence": "Thomas believed 
that Wendy regretted that _ had stolen the watch.", + "answer1": [ + "Wendy", + "she" + ], + "answer0": [ + "Thomas", + "he" + ], + "correct_answer": [ + "Wendy", + "she" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "she", + 0.707285463809967 + ], + [ + "he", + 0.11299549043178558 + ], + [ + "thomas", + 0.02341422811150551 + ], + [ + "wendy", + 0.011723235249519348 + ] + ], + "score": 1 + }, + { + "sentence": "Barbara believed that Charles regretted that _ had stolen the watch.", + "answer1": [ + "Charles", + "he" + ], + "answer0": [ + "Barbara", + "she" + ], + "correct_answer": [ + "Charles", + "he" + ], + "adjacent_ref": true, + "predict_answer": [ + [ + "he", + 0.9224457144737244 + ], + [ + "she", + 0.0191626138985157 + ], + [ + "charles", + 0.006952487863600254 + ], + [ + "barbara", + 0.0011777096660807729 + ] + ], + "score": 1 + } + ] + } +] \ No newline at end of file diff --git a/WSC_selected.txt b/WSC_selected.txt new file mode 100644 index 00000000000000..47c30cd309f331 --- /dev/null +++ b/WSC_selected.txt @@ -0,0 +1,8 @@ +The trophy doesn't fit into the brown suitcase because the [trophy] is too large. A because B +The trophy doesn't fit into the brown suitcase because the [suitcase] is too small. A because B +The brown suitcase doesn't hold the trophy because the [trophy] is too large. A because B +The brown suitcase doesn't hold the trophy because the [suitcase] is too small. A because B +The trophy can fit into the brown suitcase because the [trophy] is so small. ~A because ~B +The trophy can fit into the brown suitcase because the [suitcase] is so large. ~A because ~B +The brown suitcase can hold the trophy because the [trophy] is so small. ~A because ~B +The brown suitcase can fit into the trophy because the [suitcase] is so large. 
~A because ~B diff --git a/WSC_switched_label.json b/WSC_switched_label.json new file mode 100644 index 00000000000000..ccd4a286c1ab9d --- /dev/null +++ b/WSC_switched_label.json @@ -0,0 +1,3005 @@ +[ + { + "index": 0, + "is_switchable": 0, + "sentence": "The city councilmen refused the demonstrators a permit because [they] feared violence.", + "answer1": "The demonstrators", + "answer0": "The city councilmen", + "sentence_switched": "The demonstrators refused the city councilmen a permit because [they] feared violence.", + "correct_answer": "The city councilmen", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 1, + "is_switchable": 0, + "sentence": "The city councilmen refused the demonstrators a permit because [they] advocated violence.", + "answer1": "The demonstrators", + "answer0": "The city councilmen", + "sentence_switched": "The demonstrators refused the city councilmen a permit because [they] advocated violence.", + "correct_answer": "The demonstrators", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 2, + "is_switchable": 0, + "sentence": "The trophy doesn't fit into the brown suitcase because [it] is too large.", + "answer1": "the suitcase", + "answer0": "the trophy", + "sentence_switched": "The suitcase doesn't fit into the brown trophy because [it] is too large.", + "correct_answer": "the trophy", + "relational_word": "fit into:large/small", + "is_associative": 0 + }, + { + "index": 3, + "is_switchable": 0, + "sentence": "The trophy doesn't fit into the brown suitcase because [it] is too small.", + "answer1": "the suitcase", + "answer0": "the trophy", + "sentence_switched": "The suitcase doesn't fit into the brown trophy because [it] is too small.", + "correct_answer": "the suitcase", + "relational_word": "fit into:large/small", + "is_associative": 0 + }, + { + "index": 4, + "is_switchable": 1, + "sentence": "Joan made sure to thank Susan for all the help [she] had recieved.", + "answer1": "Susan", + "answer0": "Joan", + "sentence_switched": "Susan made sure to thank joan for all the help [she] had recieved.", + "correct_answer": "Joan", + "relational_word": "thank:receive/give", + "is_associative": 0 + }, + { + "index": 5, + "is_switchable": 1, + "sentence": "Joan made sure to thank Susan for all the help [she] had given.", + "answer1": "Susan", + "answer0": "Joan", + "sentence_switched": "Susan made sure to thank joan for all the help [she] had given.", + "correct_answer": "Susan", + "relational_word": "thank:receive/give", + "is_associative": 0 + }, + { + "index": 6, + "is_switchable": 1, + "sentence": "Paul tried to call George on the phone, but [he] wasn't successful.", + "answer1": "George", + "answer0": "Paul", + "sentence_switched": "George tried to call paul on the phone, but [he] wasn't successful.", + "correct_answer": "Paul", + "relational_word": "call:successful available", + "is_associative": 0 + }, + { + "index": 7, + "is_switchable": 1, + "sentence": "Paul tried to call George on the phone, but [he] wasn't available.", + "answer1": "George", + "answer0": "Paul", + "sentence_switched": "George tried to call paul on the phone, but [he] wasn't available.", + "correct_answer": "George", + "relational_word": "call:successful available", + "is_associative": 0 + }, + { + "index": 8, + "is_switchable": 0, + "sentence": "The lawyer asked the witness a question, but [he] was reluctant to repeat it.", + "answer1": "the witness", + "answer0": "the lawyer", + "sentence_switched": "The witness asked the lawyer a question, but 
[he] was reluctant to repeat it.", + "correct_answer": "the lawyer", + "relational_word": "ask:repeat answer", + "is_associative": 0 + }, + { + "index": 9, + "is_switchable": 0, + "sentence": "The lawyer asked the witness a question, but [he] was reluctant to answer it.", + "answer1": "the witness", + "answer0": "the lawyer", + "sentence_switched": "The witness asked the lawyer a question, but [he] was reluctant to answer it.", + "correct_answer": "the witness", + "relational_word": "ask:repeat answer", + "is_associative": 0 + }, + { + "index": 10, + "is_switchable": 1, + "sentence": "The delivery truck zoomed by the school bus because [it] was going so fast.", + "answer1": "the school bus", + "answer0": "the delivery truck", + "sentence_switched": "The school bus zoomed by the delivery truck because [it] was going so fast.", + "correct_answer": "the delivery truck", + "relational_word": "zoom by:fast/slow", + "is_associative": 0 + }, + { + "index": 11, + "is_switchable": 1, + "sentence": "The delivery truck zoomed by the school bus because [it] was going so slow.", + "answer1": "the school bus", + "answer0": "the delivery truck", + "sentence_switched": "The school bus zoomed by the delivery truck because [it] was going so slow.", + "correct_answer": "the school bus", + "relational_word": "zoom by:fast/slow", + "is_associative": 0 + }, + { + "index": 12, + "is_switchable": 1, + "sentence": "Frank felt vindicated when his longtime rival Bill revealed that [he] was the winner of the competition.", + "answer1": "Bill", + "answer0": "Frank", + "sentence_switched": "Bill felt vindicated when his longtime rival frank revealed that [he] was the winner of the competition.", + "correct_answer": "Frank", + "relational_word": "vindicated/crushed:be the winner", + "is_associative": 0 + }, + { + "index": 13, + "is_switchable": 1, + "sentence": "Frank felt crushed when his longtime rival Bill revealed that [he] was the winner of the competition.", + "answer1": "Bill", + "answer0": "Frank", + "sentence_switched": "Bill felt crushed when his longtime rival frank revealed that [he] was the winner of the competition.", + "correct_answer": "Bill", + "relational_word": "vindicated/crushed:be the winner", + "is_associative": 0 + }, + { + "index": 14, + "is_switchable": 1, + "sentence": "The man couldn't lift his son because [he] was so weak.", + "answer1": "The son", + "answer0": "The man", + "sentence_switched": "The son couldn't lift the man because [he] was so weak.", + "correct_answer": "The man", + "relational_word": "lift:weak heavy", + "is_associative": 0 + }, + { + "index": 15, + "is_switchable": 1, + "sentence": "The man couldn't lift his son because [he] was so heavy.", + "answer1": "The son", + "answer0": "The man", + "sentence_switched": "The son couldn't lift his man because [he] was so heavy.", + "correct_answer": "The son", + "relational_word": "lift:weak heavy", + "is_associative": 0 + }, + { + "index": 16, + "is_switchable": 0, + "sentence": "The large ball crashed right through the table because [it] was made of steel.", + "answer1": "The table", + "answer0": "The large ball", + "sentence_switched": "The table crashed right through the large ball because [it] was made of steel.", + "correct_answer": "The large ball", + "relational_word": "crash through:[hard]/[soft]", + "is_associative": 0 + }, + { + "index": 17, + "is_switchable": 0, + "sentence": "The large ball crashed right through the table because [it] was made of styrofoam.", + "answer1": "The table", + "answer0": "The large ball", + 
"sentence_switched": "The table crashed right through the large ball because [it] was made of styrofoam.", + "correct_answer": "The table", + "relational_word": "crash through:[hard]/[soft]", + "is_associative": 0 + }, + { + "index": 18, + "is_switchable": 1, + "sentence": "John couldn't see the stage with Billy in front of him because [he] is so short.", + "answer1": "Billy", + "answer0": "John", + "sentence_switched": "Billy couldn't see the stage with john in front of him because [he] is so short.", + "correct_answer": "John", + "relational_word": "[block]:short/tall", + "is_associative": 0 + }, + { + "index": 19, + "is_switchable": 1, + "sentence": "John couldn't see the stage with Billy in front of him because [he] is so tall.", + "answer1": "Billy", + "answer0": "John", + "sentence_switched": "Billy couldn't see the stage with john in front of him because [he] is so tall.", + "correct_answer": "Billy", + "relational_word": "[block]:short/tall", + "is_associative": 0 + }, + { + "index": 20, + "is_switchable": 1, + "sentence": "Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.", + "answer1": "Ray", + "answer0": "Tom", + "sentence_switched": "Ray threw his schoolbag down to tom after [he] reached the top of the stairs.", + "correct_answer": "Tom", + "relational_word": "down to:top/bottom", + "is_associative": 0 + }, + { + "index": 21, + "is_switchable": 1, + "sentence": "Tom threw his schoolbag down to Ray after [he] reached the bottom of the stairs.", + "answer1": "Ray", + "answer0": "Tom", + "sentence_switched": "Ray threw his schoolbag down to tom after [he] reached the botray of the stairs.", + "correct_answer": "Ray", + "relational_word": "down to:top/bottom", + "is_associative": 0 + }, + { + "index": 22, + "is_switchable": 1, + "sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.", + "answer1": "Sally", + "answer0": "Sue", + "sentence_switched": "Although they ran at about the same speed, sally beat sue because [she] had such a good start.", + "correct_answer": "Sue", + "relational_word": "beat:good/bad", + "is_associative": 0 + }, + { + "index": 23, + "is_switchable": 1, + "sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a bad start.", + "answer1": "Sally", + "answer0": "Sue", + "sentence_switched": "Although they ran at about the same speed, sally beat sue because [she] had such a bad start.", + "correct_answer": "Sally", + "relational_word": "beat:good/bad", + "is_associative": 0 + }, + { + "index": 24, + "is_switchable": 0, + "sentence": "The sculpture rolled off the shelf because [it] wasn't anchored.", + "answer1": "The shelf", + "answer0": "The sculpture", + "sentence_switched": "The shelf rolled off the sculpture because [it] wasn't anchored.", + "correct_answer": "The sculpture", + "relational_word": "roll off:anchored level", + "is_associative": 0 + }, + { + "index": 25, + "is_switchable": 0, + "sentence": "The sculpture rolled off the shelf because [it] wasn't level.", + "answer1": "The shelf", + "answer0": "The sculpture", + "sentence_switched": "The shelf rolled off the sculpture because [it] wasn't level.", + "correct_answer": "The shelf", + "relational_word": "roll off:anchored level", + "is_associative": 0 + }, + { + "index": 26, + "is_switchable": 1, + "sentence": "Sam's drawing was hung just above Tina's and [it] did look much better with another one below it.", + "answer1": "Tina's drawing", + "answer0": "Sam's drawing", + 
"sentence_switched": "Tina's drawing was hung just above sam's and [it] did look much better with another one below it.", + "correct_answer": "Sam's drawing", + "relational_word": "above/below", + "is_associative": 0 + }, + { + "index": 27, + "is_switchable": 1, + "sentence": "Sam's drawing was hung just above Tina's and [it] did look much better with another one above it.", + "answer1": "Tina's drawing", + "answer0": "Sam's drawing", + "sentence_switched": "Tina's drawing was hung just above sam's and [it] did look much better with another one above it.", + "correct_answer": "Tina's drawing", + "relational_word": "above/below", + "is_associative": 0 + }, + { + "index": 28, + "is_switchable": 1, + "sentence": "Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.", + "answer1": "Lucy", + "answer0": "Anna", + "sentence_switched": "Lucy did a lot better than her good friend anna on the test because [she] had studied so hard.", + "correct_answer": "Anna", + "relational_word": "better/worse:study hard", + "is_associative": 0 + }, + { + "index": 29, + "is_switchable": 1, + "sentence": "Anna did a lot worse than her good friend Lucy on the test because [she] had studied so hard.", + "answer1": "Lucy", + "answer0": "Anna", + "sentence_switched": "Lucy did a lot worse than her good friend anna on the test because [she] had studied so hard.", + "correct_answer": "Lucy", + "relational_word": "better/worse:study hard", + "is_associative": 0 + }, + { + "index": 30, + "is_switchable": 1, + "sentence": "The firemen arrived after the police because [they] were coming from so far away.", + "answer1": "The police", + "answer0": "The firemen", + "sentence_switched": "The police arrived after the firemen because [they] were coming from so far away.", + "correct_answer": "The firemen", + "relational_word": "after/before:far away", + "is_associative": 0 + }, + { + "index": 31, + "is_switchable": 1, + "sentence": "The firemen arrived before the police because [they] were coming from so far away.", + "answer1": "The police", + "answer0": "The firemen", + "sentence_switched": "The police arrived before the firemen because [they] were coming from so far away.", + "correct_answer": "The police", + "relational_word": "after/before:far away", + "is_associative": 0 + }, + { + "index": 32, + "is_switchable": 1, + "sentence": "Frank was upset with Tom because the toaster [he] had bought from him didn't work.", + "answer1": "Tom", + "answer0": "Frank", + "sentence_switched": "Tom was upset with frank because the toaster [he] had bought from him didn't work.", + "correct_answer": "Frank", + "relational_word": "be upset with:buy from not work/sell not work", + "is_associative": 2 + }, + { + "index": 33, + "is_switchable": 1, + "sentence": "Frank was upset with Tom because the toaster [he] had sold him didn't work.", + "answer1": "Tom", + "answer0": "Frank", + "sentence_switched": "Tom was upset with frank because the toaster [he] had sold him didn't work.", + "correct_answer": "Tom", + "relational_word": "be upset with:buy from not work/sell not work", + "is_associative": 2 + }, + { + "index": 34, + "is_switchable": 1, + "sentence": "Jim yelled at Kevin because [he] was so upset.", + "answer1": "Kevin", + "answer0": "Jim", + "sentence_switched": "Kevin yelled at jim because [he] was so upset.", + "correct_answer": "Jim", + "relational_word": "?yell at comfort:upset", + "is_associative": 0 + }, + { + "index": 35, + "is_switchable": 1, + "sentence": "Jim comforted Kevin because [he] 
was so upset.", + "answer1": "Kevin", + "answer0": "Jim", + "sentence_switched": "Kevin comforted jim because [he] was so upset.", + "correct_answer": "Kevin", + "relational_word": "?yell at comfort:upset", + "is_associative": 0 + }, + { + "index": 36, + "is_switchable": 1, + "sentence": "The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first.", + "answer1": "The bag of flour", + "answer0": "The sack of potatoes", + "sentence_switched": "The bag of flour had been placed above the sack of potatoes, so [it] had to be moved first.", + "correct_answer": "The sack of potatoes", + "relational_word": "above/below:moved first", + "is_associative": 0 + }, + { + "index": 37, + "is_switchable": 1, + "sentence": "The sack of potatoes had been placed below the bag of flour, so [it] had to be moved first.", + "answer1": "The bag of flour", + "answer0": "The sack of potatoes", + "sentence_switched": "The bag of flour had been placed below the sack of potatoes, so [it] had to be moved first.", + "correct_answer": "The bag of flour", + "relational_word": "above/below:moved first", + "is_associative": 0 + }, + { + "index": 38, + "is_switchable": 1, + "sentence": "Pete envies Martin although [he] is very successful.", + "answer1": "Martin", + "answer0": "Pete", + "sentence_switched": "Martin envies pete although [he] is very successful.", + "correct_answer": "Pete", + "relational_word": "although/because", + "is_associative": 0 + }, + { + "index": 39, + "is_switchable": 1, + "sentence": "Pete envies Martin because [he] is very successful.", + "answer1": "Martin", + "answer0": "Pete", + "sentence_switched": "Martin envies pete because [he] is very successful.", + "correct_answer": "Martin", + "relational_word": "although/because", + "is_associative": 0 + }, + { + "index": 40, + "is_switchable": 1, + "sentence": "The older students were bullying the younger ones, so we punished [them] .", + "answer1": "The younger students", + "answer0": "The older students", + "sentence_switched": "The younger students were bullying the older ones, so we punished [them] .", + "correct_answer": "The older students", + "relational_word": "bully:punish rescue", + "is_associative": 0 + }, + { + "index": 41, + "is_switchable": 1, + "sentence": "The older students were bullying the younger ones, so we rescued [them] .", + "answer1": "The younger students", + "answer0": "The older students", + "sentence_switched": "The younger students were bullying the older ones, so we rescued [them] .", + "correct_answer": "The younger students", + "relational_word": "bully:punish rescue", + "is_associative": 0 + }, + { + "index": 42, + "is_switchable": 1, + "sentence": "I poured water from the bottle into the cup until [it] was empty.", + "answer1": "the cup", + "answer0": "the bottle", + "sentence_switched": "I poured water from the cup into the bottle until [it] was empty.", + "correct_answer": "the bottle", + "relational_word": "pour:empty/full", + "is_associative": 0 + }, + { + "index": 43, + "is_switchable": 1, + "sentence": "I poured water from the bottle into the cup until [it] was full.", + "answer1": "the cup", + "answer0": "the bottle", + "sentence_switched": "I poured water from the cup into the bottle until [it] was full.", + "correct_answer": "the cup", + "relational_word": "pour:empty/full", + "is_associative": 0 + }, + { + "index": 44, + "is_switchable": 1, + "sentence": "Susan knows all about Ann's personal problems because [she] is nosy.", + "answer1": "Ann", + "answer0": "Susan", + 
"sentence_switched": "Ann knows all about susan's personal problems because [she] is nosy.", + "correct_answer": "Susan", + "relational_word": "know:nosy indiscreet", + "is_associative": 0 + }, + { + "index": 45, + "is_switchable": 1, + "sentence": "Susan knows all about Ann's personal problems because [she] is indiscreet.", + "answer1": "Ann", + "answer0": "Susan", + "sentence_switched": "Ann knows all about susan's personal problems because [she] is indiscreet.", + "correct_answer": "Ann", + "relational_word": "know:nosy indiscreet", + "is_associative": 0 + }, + { + "index": 46, + "is_switchable": 1, + "sentence": "Sid explained his theory to Mark but [he] couldn't convince him.", + "answer1": "Mark", + "answer0": "Sid", + "sentence_switched": "Mark explained his theory to sid but [he] couldn't convince him.", + "correct_answer": "Sid", + "relational_word": "explain:convince/understand", + "is_associative": 2 + }, + { + "index": 47, + "is_switchable": 1, + "sentence": "Sid explained his theory to Mark but [he] couldn't understand him.", + "answer1": "Mark", + "answer0": "Sid", + "sentence_switched": "Mark explained his theory to sid but [he] couldn't understand him.", + "correct_answer": "Mark", + "relational_word": "explain:convince/understand", + "is_associative": 2 + }, + { + "index": 48, + "is_switchable": 1, + "sentence": "Susan knew that Ann's son had been in a car accident, so [she] told her about it.", + "answer1": "Ann", + "answer0": "Susan", + "sentence_switched": "Ann knew that susan's son had been in a car accident, so [she] told her about it.", + "correct_answer": "Susan", + "relational_word": "?know tell:so/because", + "is_associative": 2 + }, + { + "index": 49, + "is_switchable": 1, + "sentence": "Susan knew that Ann's son had been in a car accident, because [she] told her about it.", + "answer1": "Ann", + "answer0": "Susan", + "sentence_switched": "Ann knew that susan's son had been in a car accident, because [she] told her about it.", + "correct_answer": "Ann", + "relational_word": "?know tell:so/because", + "is_associative": 2 + }, + { + "index": 50, + "is_switchable": 0, + "sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.", + "answer1": "Joe's uncle", + "answer0": "Joe", + "sentence_switched": "Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.", + "correct_answer": "Joe", + "relational_word": "beat:younger/older", + "is_associative": 0 + }, + { + "index": 51, + "is_switchable": 0, + "sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years older.", + "answer1": "Joe's uncle", + "answer0": "Joe", + "sentence_switched": "Joe can still beat him at tennis, even though [he] is 30 years older.", + "correct_answer": "Joe's uncle", + "relational_word": "beat:younger/older", + "is_associative": 0 + }, + { + "index": 52, + "is_switchable": 0, + "sentence": "The painting in Mark's living room shows an oak tree. [It] is to the right of the bookcase.", + "answer1": "The oak tree", + "answer0": "The painting", + "sentence_switched": "The oak tree in mark's living room shows a painting. [it] is to the right of the bookcase.", + "correct_answer": "The painting", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 53, + "is_switchable": 0, + "sentence": "The painting in Mark's living room shows an oak tree. 
[It] is to the right of a house.", + "answer1": "The oak tree", + "answer0": "The painting", + "sentence_switched": "The oak tree in mark's living room shows a painting. [it] is to the right of a house.", + "correct_answer": "The oak tree", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 54, + "is_switchable": 0, + "sentence": "There is a gap in the wall. You can see the garden through [it] .", + "answer1": "The wall", + "answer0": "The gap", + "sentence_switched": "There is a wall in the gap. you can see the garden through [it] .", + "correct_answer": "The gap", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 55, + "is_switchable": 0, + "sentence": "There is a gap in the wall. You can see the garden behind [it] .", + "answer1": "The wall", + "answer0": "The gap", + "sentence_switched": "There is a wall in the gap. you can see the garden behind [it] .", + "correct_answer": "The wall", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 56, + "is_switchable": 0, + "sentence": "The drain is clogged with hair. [It] has to be cleaned.", + "answer1": "The hair", + "answer0": "The drain", + "sentence_switched": "The hair is clogged with drain. [it] has to be cleaned.", + "correct_answer": "The drain", + "relational_word": "clog:cleaned removed", + "is_associative": 0 + }, + { + "index": 57, + "is_switchable": 0, + "sentence": "The drain is clogged with hair. [It] has to be removed.", + "answer1": "The hair", + "answer0": "The drain", + "sentence_switched": "The hair is clogged with drain. [it] has to be removed.", + "correct_answer": "The hair", + "relational_word": "clog:cleaned removed", + "is_associative": 0 + }, + { + "index": 58, + "is_switchable": 0, + "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. Luckily, [it] was short, so it worked out.", + "answer1": "The train", + "answer0": "The meeting", + "sentence_switched": "My train started at 4:00 and i needed to catch the meeting at 4:30, so there wasn't much time. luckily, [it] was short, so it worked out.", + "correct_answer": "The meeting", + "relational_word": "?immediately follow:short delayed", + "is_associative": 1 + }, + { + "index": 59, + "is_switchable": 0, + "sentence": "My meeting started at 4:00 and I needed to catch the train at 4:30, so there wasn't much time. Luckily, [it] was delayed, so it worked out.", + "answer1": "The train", + "answer0": "The meeting", + "sentence_switched": "My train started at 4:00 and i needed to catch the meeting at 4:30, so there wasn't much time. 
luckily, [it] was delayed, so it worked out.", + "correct_answer": "The train", + "relational_word": "?immediately follow:short delayed", + "is_associative": 0 + }, + { + "index": 60, + "is_switchable": 0, + "sentence": "There is a pillar between me and the stage, and I can't see around [it] .", + "answer1": "The stage", + "answer0": "The pillar", + "sentence_switched": "There is a stage between me and the pillar, and i can't see around [it] .", + "correct_answer": "The pillar", + "relational_word": "?between:see see around", + "is_associative": 2 + }, + { + "index": 61, + "is_switchable": 0, + "sentence": "There is a pillar between me and the stage, and I can't see [it] .", + "answer1": "The stage", + "answer0": "The pillar", + "sentence_switched": "There is a stage between me and the pillar, and i can't see [it] .", + "correct_answer": "The stage", + "relational_word": "?between:see see around", + "is_associative": 2 + }, + { + "index": 62, + "is_switchable": 0, + "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear [it] .", + "answer1": "The subway", + "answer0": "The announcement", + "sentence_switched": "They broadcast a subway, but an announcement came into the station and i couldn't hear [it] .", + "correct_answer": "The announcement", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 63, + "is_switchable": 0, + "sentence": "They broadcast an announcement, but a subway came into the station and I couldn't hear over [it] .", + "answer1": "The subway", + "answer0": "The announcement", + "sentence_switched": "They broadcast a subway, but an announcement came into the station and i couldn't hear over [it] .", + "correct_answer": "The subway", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 64, + "is_switchable": 0, + "sentence": "In the middle of the outdoor concert, the rain started falling, but [it] continued until 10.", + "answer1": "The rain", + "answer0": "The concert", + "sentence_switched": "In the middle of the outdoor rain, the concert started falling, but [it] continued until 10.", + "correct_answer": "The concert", + "relational_word": "but/and", + "is_associative": 0 + }, + { + "index": 65, + "is_switchable": 0, + "sentence": "In the middle of the outdoor concert, the rain started falling, and [it] continued until 10.", + "answer1": "The rain", + "answer0": "The concert", + "sentence_switched": "In the middle of the outdoor rain, the concert started falling, and [it] continued until 10.", + "correct_answer": "The rain", + "relational_word": "but/and", + "is_associative": 0 + }, + { + "index": 66, + "is_switchable": 0, + "sentence": "I used an old rag to clean the knife, and then I put [it] in the trash.", + "answer1": "The knife", + "answer0": "The rag", + "sentence_switched": "I used an old knife to clean the rag, and then i put [it] in the trash.", + "correct_answer": "The rag", + "relational_word": "clean:put in the trash put in the drawer", + "is_associative": 0 + }, + { + "index": 67, + "is_switchable": 0, + "sentence": "I used an old rag to clean the knife, and then I put [it] in the drawer.", + "answer1": "The knife", + "answer0": "The rag", + "sentence_switched": "I used an old knife to clean the rag, and then i put [it] in the drawer.", + "correct_answer": "The knife", + "relational_word": "clean:put in the trash put in the drawer", + "is_associative": 0 + }, + { + "index": 68, + "is_switchable": 1, + "sentence": "Ann asked Mary what time the library closes, because [she] had 
forgotten.", + "answer1": "Mary", + "answer0": "Ann", + "sentence_switched": "Mary asked ann what time the library closes, because [she] had forgotten.", + "correct_answer": "Ann", + "relational_word": "because/but", + "is_associative": 0 + }, + { + "index": 69, + "is_switchable": 1, + "sentence": "Ann asked Mary what time the library closes, but [she] had forgotten.", + "answer1": "Mary", + "answer0": "Ann", + "sentence_switched": "Mary asked ann what time the library closes, but [she] had forgotten.", + "correct_answer": "Mary", + "relational_word": "because/but", + "is_associative": 0 + }, + { + "index": 70, + "is_switchable": 0, + "sentence": "I took the water bottle out of the backpack so that [it] would be handy.", + "answer1": "The backpack", + "answer0": "The water bottle", + "sentence_switched": "I took the backpack out of the water bottle so that [it] would be handy.", + "correct_answer": "The water bottle", + "relational_word": "out of:handy lighter", + "is_associative": 0 + }, + { + "index": 71, + "is_switchable": 0, + "sentence": "I took the water bottle out of the backpack so that [it] would be lighter.", + "answer1": "The backpack", + "answer0": "The water bottle", + "sentence_switched": "I took the backpack out of the water bottle so that [it] would be lighter.", + "correct_answer": "The backpack", + "relational_word": "out of:handy lighter", + "is_associative": 0 + }, + { + "index": 72, + "is_switchable": 0, + "sentence": "I couldn't put the pot on the shelf because [it] was too tall.", + "answer1": "The shelf", + "answer0": "The pot", + "sentence_switched": "I couldn't put the shelf on the pot because [it] was too tall.", + "correct_answer": "The pot", + "relational_word": "put:tall high", + "is_associative": 1 + }, + { + "index": 73, + "is_switchable": 0, + "sentence": "I couldn't put the pot on the shelf because [it] was too high.", + "answer1": "The shelf", + "answer0": "The pot", + "sentence_switched": "I couldn't put the shelf on the pot because [it] was too high.", + "correct_answer": "The shelf", + "relational_word": "put:tall high", + "is_associative": 0 + }, + { + "index": 74, + "is_switchable": 0, + "sentence": "I'm sure that my map will show this building; [it] is very good.", + "answer1": "The building", + "answer0": "The map", + "sentence_switched": "I'm sure that my building will show this map; [it] is very good.", + "correct_answer": "The map", + "relational_word": "show:good famous", + "is_associative": 1 + }, + { + "index": 75, + "is_switchable": 0, + "sentence": "I'm sure that my map will show this building; [it] is very famous.", + "answer1": "The building", + "answer0": "The map", + "sentence_switched": "I'm sure that my building will show this map; [it] is very famous.", + "correct_answer": "The building", + "relational_word": "show:good famous", + "is_associative": 1 + }, + { + "index": 76, + "is_switchable": 1, + "sentence": "Bob paid for Charlie's college education. [He] is very generous.", + "answer1": "Charlie", + "answer0": "Bob", + "sentence_switched": "Charlie paid for bob's college education. [he] is very generous.", + "correct_answer": "Bob", + "relational_word": "pay for:generous grateful", + "is_associative": 0 + }, + { + "index": 77, + "is_switchable": 1, + "sentence": "Bob paid for Charlie's college education. [He] is very grateful.", + "answer1": "Charlie", + "answer0": "Bob", + "sentence_switched": "Charlie paid for bob's college education. 
[he] is very grateful.", + "correct_answer": "Charlie", + "relational_word": "pay for:generous grateful", + "is_associative": 0 + }, + { + "index": 78, + "is_switchable": 1, + "sentence": "Bob paid for Charlie's college education, but now Charlie acts as though it never happened. [He] is very hurt.", + "answer1": "Charlie", + "answer0": "Bob", + "sentence_switched": "Charlie paid for bob's college education, but now bob acts as though it never happened. [he] is very hurt.", + "correct_answer": "Bob", + "relational_word": "but", + "is_associative": 0 + }, + { + "index": 79, + "is_switchable": 1, + "sentence": "Bob paid for Charlie's college education, but now Charlie acts as though it never happened. [He] is very ungrateful.", + "answer1": "Charlie", + "answer0": "Bob", + "sentence_switched": "Charlie paid for bob's college education, but now bob acts as though it never happened. [he] is very ungrateful.", + "correct_answer": "Charlie", + "relational_word": "but", + "is_associative": 0 + }, + { + "index": 80, + "is_switchable": 1, + "sentence": "Bob was playing cards with Adam and was way ahead. If Adam hadn't had a sudden run of good luck, [he] would have won.", + "answer1": "Adam", + "answer0": "Bob", + "sentence_switched": "Adam was playing cards with bob and was way ahead. if bob hadn't had a sudden run of good luck, [he] would have won.", + "correct_answer": "Bob", + "relational_word": "if", + "is_associative": 0 + }, + { + "index": 81, + "is_switchable": 1, + "sentence": "Bob was playing cards with Adam and was way ahead. If Adam hadn't had a sudden run of good luck, [he] would have lost.", + "answer1": "Adam", + "answer0": "Bob", + "sentence_switched": "Adam was playing cards with bob and was way ahead. if bob hadn't had a sudden run of good luck, [he] would have lost.", + "correct_answer": "Adam", + "relational_word": "if", + "is_associative": 0 + }, + { + "index": 82, + "is_switchable": 1, + "sentence": "Adam can't leave work here until Bob arrives to replace him. If Bob had left home for work on time, [he] would be gone by this time.", + "answer1": "Bob", + "answer0": "Adam", + "sentence_switched": "Bob can't leave work here until adam arrives to replace him. if adam had left home for work on time, [he] would be gone by this time.", + "correct_answer": "Adam", + "relational_word": "if", + "is_associative": 0 + }, + { + "index": 83, + "is_switchable": 1, + "sentence": "Adam can't leave work here until Bob arrives to replace him. If Bob had left home for work on time, [he] would be here by this time.", + "answer1": "Bob", + "answer0": "Adam", + "sentence_switched": "Bob can't leave work here until adam arrives to replace him. 
if adam had left home for work on time, [he] would be here by this time.", + "correct_answer": "Bob", + "relational_word": "if", + "is_associative": 0 + }, + { + "index": 84, + "is_switchable": 0, + "sentence": "If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.", + "answer1": "Sam", + "answer0": "The con artist", + "sentence_switched": "If sam has succeeded in fooling the con artist, [he] would have gotten a lot of money.", + "correct_answer": "The con artist", + "relational_word": "fool:get/lose", + "is_associative": 0 + }, + { + "index": 85, + "is_switchable": 0, + "sentence": "If the con artist has succeeded in fooling Sam, [he] would have lost a lot of money.", + "answer1": "Sam", + "answer0": "The con artist", + "sentence_switched": "If sam has succeeded in fooling the con artist, [he] would have lost a lot of money.", + "correct_answer": "Sam", + "relational_word": "fool:get/lose", + "is_associative": 0 + }, + { + "index": 86, + "is_switchable": 0, + "sentence": "It was a summer afternoon, and the dog was sitting in the middle of the lawn. After a while, it got up and moved to a spot under the tree, because [it] was hot.", + "answer1": "The spot under the tree", + "answer0": "The dog", + "sentence_switched": "It was a summer afternoon, and the spot under tree was sitting in the middle of the lawn. after a while, it got up and moved to a dog, because [it] was hot.", + "correct_answer": "The dog", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 87, + "is_switchable": 0, + "sentence": "It was a summer afternoon, and the dog was sitting in the middle of the lawn. After a while, it got up and moved to a spot under the tree, because [it] was cooler.", + "answer1": "The spot under the tree", + "answer0": "The dog", + "sentence_switched": "It was a summer afternoon, and the spot under tree was sitting in the middle of the lawn. after a while, it got up and moved to a dog, because [it] was cooler.", + "correct_answer": "The spot under the tree", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 88, + "is_switchable": 0, + "sentence": "The cat was lying by the mouse hole waiting for the mouse, but [it] was too impatient.", + "answer1": "The mouse", + "answer0": "The cat", + "sentence_switched": "The mouse was lying by the cat hole waiting for the cat, but [it] was too impatient.", + "correct_answer": "The cat", + "relational_word": "wait:impatient cautious", + "is_associative": 0 + }, + { + "index": 89, + "is_switchable": 0, + "sentence": "The cat was lying by the mouse hole waiting for the mouse, but [it] was too cautious.", + "answer1": "The mouse", + "answer0": "The cat", + "sentence_switched": "The mouse was lying by the cat hole waiting for the cat, but [it] was too cautious.", + "correct_answer": "The mouse", + "relational_word": "wait:impatient cautious", + "is_associative": 0 + }, + { + "index": 90, + "is_switchable": 0, + "sentence": "Anne gave birth to a daughter last month. [She] is a very charming woman.", + "answer1": "Anne's daughter", + "answer0": "Anne", + "sentence_switched": "Anne's daughter gave birth to Anne last month. [she] is a very charming woman.", + "correct_answer": "Anne", + "relational_word": "give birth:woman baby", + "is_associative": 0 + }, + { + "index": 91, + "is_switchable": 0, + "sentence": "Anne daughter gave birth to Anne last month. 
[She] is a very charming baby.", + "answer1": "Anne's daughter", + "answer0": "Anne", + "sentence_switched": "Anne's daughter gave birth to Anne last month. [she] is a very charming baby.", + "correct_answer": "Anne's daughter", + "relational_word": "give birth:woman baby", + "is_associative": 0 + }, + { + "index": 92, + "is_switchable": 0, + "sentence": "Alice tried frantically to stop her daughter from chatting at the party, leaving us to wonder why [she] was behaving so strangely.", + "answer1": "Alice's daughter", + "answer0": "Alice", + "sentence_switched": "Alice's daughter tried frantically to stop Alice from chatting at the party, leaving us to wonder why [she] was behaving so strangely.", + "correct_answer": "Alice", + "relational_word": "?stop normal/stop abnormal:strange", + "is_associative": 0 + }, + { + "index": 93, + "is_switchable": 0, + "sentence": "Alice tried frantically to stop her daughter from barking at the party, leaving us to wonder why [she] was behaving so strangely.", + "answer1": "Alice's daughter", + "answer0": "Alice", + "sentence_switched": "Alice's daughter tried frantically to stop Alice from barking at the party, leaving us to wonder why [she] was behaving so strangely.", + "correct_answer": "Alice's daughter", + "relational_word": "?stop normal/stop abnormal:strange", + "is_associative": 0 + }, + { + "index": 94, + "is_switchable": 1, + "sentence": "I saw Jim yelling at some guy in a military uniform with a huge red beard. I don't know why [he] was, but he looked very unhappy.", + "answer1": "the guy in uniform", + "answer0": "Jim", + "sentence_switched": "I saw the guy in military uniform with a huge red beard yelling at jim. i don't know why [he] was, but he looked very unhappy.", + "correct_answer": "Jim", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 95, + "is_switchable": 1, + "sentence": "I saw Jim yelling at some guy in a military uniform with a huge red beard. I don't know who [he] was, but he looked very unhappy.", + "answer1": "the guy in uniform", + "answer0": "Jim", + "sentence_switched": "I saw the guy in military uniform with a huge red beard yelling at jim. i don't know who [he] was, but he looked very unhappy.", + "correct_answer": "the guy in uniform", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 96, + "is_switchable": 0, + "sentence": "The fish ate the worm. [It] was hungry.", + "answer1": "The worm", + "answer0": "The fish", + "sentence_switched": "The worm ate the fish. [it] was hungry.", + "correct_answer": "The fish", + "relational_word": "eat:hungry tasty", + "is_associative": 0 + }, + { + "index": 97, + "is_switchable": 0, + "sentence": "The fish ate the worm. [It] was tasty.", + "answer1": "The worm", + "answer0": "The fish", + "sentence_switched": "The worm ate the fish. [it] was tasty.", + "correct_answer": "The worm", + "relational_word": "eat:hungry tasty", + "is_associative": 0 + }, + { + "index": 98, + "is_switchable": 0, + "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] in.", + "answer1": "The chewing gum", + "answer0": "The key", + "sentence_switched": "I was trying to open the lock with the chewing gum, but someone had filled the keyhole with the key, and i couldn't get [it] in.", + "correct_answer": "The key", + "relational_word": "put ... into filled with ... 
:get in/get out", + "is_associative": 1 + }, + { + "index": 99, + "is_switchable": 0, + "sentence": "I was trying to open the lock with the key, but someone had filled the keyhole with chewing gum, and I couldn't get [it] out.", + "answer1": "The chewing gum", + "answer0": "The key", + "sentence_switched": "I was trying to open the lock with the chewing gum, but someone had filled the keyhole with the key, and i couldn't get [it] out.", + "correct_answer": "The chewing gum", + "relational_word": "put ... into filled with ... :get in/get out", + "is_associative": 0 + }, + { + "index": 100, + "is_switchable": 0, + "sentence": "The dog chased the cat, which ran up a tree. [It] waited at the bottom.", + "answer1": "The cat", + "answer0": "The dog", + "sentence_switched": "The cat chased the dog, which ran up a tree. [it] waited at the bottom.", + "correct_answer": "The dog", + "relational_word": "up:at the bottom/at the top", + "is_associative": 0 + }, + { + "index": 101, + "is_switchable": 0, + "sentence": "The dog chased the cat, which ran up a tree. [It] waited at the top.", + "answer1": "The cat", + "answer0": "The dog", + "sentence_switched": "The cat chased the dog, which ran up a tree. [it] waited at the top.", + "correct_answer": "The cat", + "relational_word": "up:at the bottom/at the top", + "is_associative": 0 + }, + { + "index": 102, + "is_switchable": 0, + "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get [it] removed.", + "answer1": "The roof", + "answer0": "The tree", + "sentence_switched": "In the storm, the roof fell down and crashed through the tree of my house. now, i have to get [it] removed.", + "correct_answer": "The tree", + "relational_word": "crash through:removed repaired", + "is_associative": 0 + }, + { + "index": 103, + "is_switchable": 0, + "sentence": "In the storm, the tree fell down and crashed through the roof of my house. Now, I have to get [it] repaired.", + "answer1": "The roof", + "answer0": "The tree", + "sentence_switched": "In the storm, the roof fell down and crashed through the tree of my house. now, i have to get [it] repaired.", + "correct_answer": "The roof", + "relational_word": "crash through:removed repaired", + "is_associative": 1 + }, + { + "index": 104, + "is_switchable": 0, + "sentence": "The customer walked into the bank and stabbed one of the tellers. [He] was immediately taken to the police station.", + "answer1": "The teller", + "answer0": "The customer", + "sentence_switched": "The teller walked into the bank and stabbed one of the customers. [he] was immediately taken to the police station.", + "correct_answer": "The customer", + "relational_word": "stab:taken to the police station taken to the hospital", + "is_associative": 0 + }, + { + "index": 105, + "is_switchable": 0, + "sentence": "The customer walked into the bank and stabbed one of the tellers. [He] was immediately taken to the hospital.", + "answer1": "The teller", + "answer0": "The customer", + "sentence_switched": "The teller walked into the bank and stabbed one of the customers. [he] was immediately taken to the hospital.", + "correct_answer": "The teller", + "relational_word": "stab:taken to the police station taken to the hospital", + "is_associative": 0 + }, + { + "index": 106, + "is_switchable": 1, + "sentence": "John was doing research in the library when he heard a man humming and whistling. 
[He] was very annoyed.", + "answer1": "The man", + "answer0": "John", + "sentence_switched": "Man was doing research in the library when he heard a john humming and whistling. [he] was very annoyed.", + "correct_answer": "John", + "relational_word": "hear ... humming and whistling:annoyed/annoying", + "is_associative": 0 + }, + { + "index": 107, + "is_switchable": 1, + "sentence": "John was doing research in the library when he heard a man humming and whistling. [He] was very annoying.", + "answer1": "The man", + "answer0": "John", + "sentence_switched": "A man was doing research in the library when he heard john humming and whistling. [he] was very annoying.", + "correct_answer": "The man", + "relational_word": "hear ... humming and whistling:annoyed/annoying", + "is_associative": 0 + }, + { + "index": 108, + "is_switchable": 0, + "sentence": "John was jogging through the park when he saw a man juggling watermelons. [He] was very impressed.", + "answer1": "The juggler", + "answer0": "John", + "sentence_switched": "The juggler was jogging through the park when he saw a man juggling watermelons. [he] was very impressed.", + "correct_answer": "John", + "relational_word": "see ... juggling watermelons:impressed/impressive", + "is_associative": 0 + }, + { + "index": 109, + "is_switchable": 0, + "sentence": "John was jogging through the park when he saw a man juggling watermelons. [He] was very impressive.", + "answer1": "The juggler", + "answer0": "John", + "sentence_switched": "The juggler was jogging through the park when he saw a man juggling watermelons. [he] was very impressive.", + "correct_answer": "The juggler", + "relational_word": "see ... juggling watermelons:impressed/impressive", + "is_associative": 1 + }, + { + "index": 110, + "is_switchable": 1, + "sentence": "Bob collapsed on the sidewalk. Soon he saw Carl coming to help. [He] was very ill.", + "answer1": "Carl", + "answer0": "Bob", + "sentence_switched": "Carl collapsed on the sidewalk. soon he saw bob coming to help. [he] was very ill.", + "correct_answer": "Bob", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 111, + "is_switchable": 1, + "sentence": "Bob collapsed on the sidewalk. Soon he saw Carl coming to help. [He] was very concerned.", + "answer1": "Carl", + "answer0": "Bob", + "sentence_switched": "Carl collapsed on the sidewalk. soon he saw bob coming to help. [he] was very concerned.", + "correct_answer": "Carl", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 112, + "is_switchable": 0, + "sentence": "Sam and Amy are passionately in love, but Amy's parents are unhappy about it, because [they] are fifteen.", + "answer1": "Amy's parents", + "answer0": "Sam and Amy", + "sentence_switched": "Amy's parents are passionately in love, but sam and amy are unhappy about it, because [they] are fifteen.", + "correct_answer": "Sam and Amy", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 113, + "is_switchable": 0, + "sentence": "Sam and Amy are passionately in love, but Amy's parents are unhappy about it, because [they] are snobs.", + "answer1": "Amy's parents", + "answer0": "Sam and Amy", + "sentence_switched": "Amy's parents are passionately in love, but sam and amy are unhappy about it, because [they] are snobs.", + "correct_answer": "Amy's parents", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 114, + "is_switchable": 1, + "sentence": "Mark told Pete many lies about himself, which Pete included in his book. 
[He] should have been more truthful.", + "answer1": "Pete", + "answer0": "Mark", + "sentence_switched": "Pete told mark many lies about himself, which mark included in his book. [he] should have been more truthful.", + "correct_answer": "Mark", + "relational_word": "tell lies: truthful skeptical", + "is_associative": 0 + }, + { + "index": 115, + "is_switchable": 1, + "sentence": "Mark told Pete many lies about himself, which Pete included in his book. [He] should have been more skeptical.", + "answer1": "Pete", + "answer0": "Mark", + "sentence_switched": "Pete told mark many lies about himself, which mark included in his book. [he] should have been more skeptical.", + "correct_answer": "Pete", + "relational_word": "tell lies: truthful skeptical", + "is_associative": 0 + }, + { + "index": 116, + "is_switchable": 0, + "sentence": "Joe has sold his house and bought a new one a few miles away. He will be moving out of [it] on Thursday.", + "answer1": "The new house", + "answer0": "The old house", + "sentence_switched": "Joe has sold his new house and bought a old one a few miles away. he will be moving out of [it] on thursday.", + "correct_answer": "The old house", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 117, + "is_switchable": 0, + "sentence": "Joe has sold his house and bought a new one a few miles away. He will be moving into [it] on Thursday.", + "answer1": "The new house", + "answer0": "The old house", + "sentence_switched": "Joe has sold his new house and bought a old one a few miles away. he will be moving into [it] on thursday.", + "correct_answer": "The new house", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 118, + "is_switchable": 0, + "sentence": "Many people start to read Paul's books and can't put them down. [They] are gripped because Paul writes so well.", + "answer1": "Paul's books", + "answer0": "People", + "sentence_switched": "Many paul's books start to read people and can't put them down. [they] are gripped because paul writes so well.", + "correct_answer": "People", + "relational_word": "read:gripped popular", + "is_associative": 1 + }, + { + "index": 119, + "is_switchable": 0, + "sentence": "Many people start to read Paul's books and can't put them down. [They] are popular because Paul writes so well.", + "answer1": "Paul's books", + "answer0": "People", + "sentence_switched": "Many paul's books start to read people and can't put them down. [they] are popular because paul writes so well.", + "correct_answer": "Paul's books", + "relational_word": "read:gripped popular", + "is_associative": 1 + }, + { + "index": 120, + "is_switchable": 0, + "sentence": "Mary took out her flute and played one of her favorite pieces. She has had [it] since she was a child.", + "answer1": "The piece", + "answer0": "The flute", + "sentence_switched": "Mary took out her piece and played one of her favorite flute. she has had [it] since she was a child.", + "correct_answer": "The flute", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 121, + "is_switchable": 0, + "sentence": "Mary took out her flute and played one of her favorite pieces. She has loved [it] since she was a child.", + "answer1": "The piece", + "answer0": "The flute", + "sentence_switched": "Mary took out her piece and played one of her favorite flute. 
she has loved [it] since she was a child.", + "correct_answer": "The piece", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 122, + "is_switchable": 0, + "sentence": "Sam pulled up a chair to the piano, but [it] was broken, so he had to stand instead.", + "answer1": "The piano", + "answer0": "The chair", + "sentence_switched": "Sam pulled up a piano to the chair, but [it] was broken, so he had to stand instead.", + "correct_answer": "The chair", + "relational_word": "none", + "is_associative": 2 + }, + { + "index": 123, + "is_switchable": 0, + "sentence": "Sam pulled up a chair to the piano, but [it] was broken, so he had to sing instead.", + "answer1": "The piano", + "answer0": "The chair", + "sentence_switched": "Sam pulled up a piano to the chair, but [it] was broken, so he had to sing instead.", + "correct_answer": "The piano", + "relational_word": "none", + "is_associative": 2 + }, + { + "index": 124, + "is_switchable": 0, + "sentence": "Since it was raining, I carried the newspaper in my backpack to keep [it] dry.", + "answer1": "The backpack", + "answer0": "The newspaper", + "sentence_switched": "Since it was raining, i carried the backpack in my newspaper to keep [it] dry.", + "correct_answer": "The newspaper", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 125, + "is_switchable": 0, + "sentence": "Since it was raining, I carried the newspaper over my backpack to keep [it] dry.", + "answer1": "The backpack", + "answer0": "The newspaper", + "sentence_switched": "Since it was raining, i carried the backpack over my newspaper to keep [it] dry.", + "correct_answer": "The backpack", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 126, + "is_switchable": 0, + "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. She reads [it] when she gets home from work.", + "answer1": "The article", + "answer0": "The book", + "sentence_switched": "Sara borrowed the article from the library because she needs it for an book she is working on. she reads [it] when she gets home from work.", + "correct_answer": "The book", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 127, + "is_switchable": 0, + "sentence": "Sara borrowed the book from the library because she needs it for an article she is working on. She writes [it] when she gets home from work.", + "answer1": "The article", + "answer0": "The book", + "sentence_switched": "Sara borrowed the article from the library because she needs it for an book she is working on. 
she writes [it] when she gets home from work.", + "correct_answer": "The article", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 128, + "is_switchable": 0, + "sentence": "This morning, Joey built a sand castle on the beach, and put a toy flag in the highest tower, but this afternoon the tide knocked [it] down.", + "answer1": "The flag", + "answer0": "The sand castle", + "sentence_switched": "This morning, joey built a flag on the beach, and put a toy sand castle in the highest tower, but this afternoon the tide knocked [it] down.", + "correct_answer": "The sand castle", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 129, + "is_switchable": 0, + "sentence": "This morning, Joey built a sand castle on the beach, and put a toy flag in the highest tower, but this afternoon the wind knocked [it] down.", + "answer1": "The flag", + "answer0": "The sand castle", + "sentence_switched": "This morning, joey built a flag on the beach, and put a toy sand castle in the highest tower, but this afternoon the wind knocked [it] down.", + "correct_answer": "The flag", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 130, + "is_switchable": 1, + "sentence": "Jane knocked on Susan's door, but there was no answer. [She] was disappointed.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on jane's door, but there was no answer. [she] was disappointed.", + "correct_answer": "Jane", + "relational_word": "but:disappointed", + "is_associative": 0 + }, + { + "index": 131, + "is_switchable": 1, + "sentence": "Jane knocked on Susan's door, but there was no answer. [She] was out.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on jane's door, but there was no answer. [she] was out.", + "correct_answer": "Susan", + "relational_word": "but:disappointed", + "is_associative": 0 + }, + { + "index": 132, + "is_switchable": 1, + "sentence": "Jane knocked on the door, and Susan answered it. [She] invited her to come out.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on the door, and jane answered it. [she] invited her to come out.", + "correct_answer": "Jane", + "relational_word": "visit:invite come out/invite come in", + "is_associative": 2 + }, + { + "index": 133, + "is_switchable": 1, + "sentence": "Jane knocked on the door, and Susan answered it. [She] invited her to come in.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on the door, and jane answered it. 
[she] invited her to come in.", + "correct_answer": "Susan", + "relational_word": "visit:invite come out/invite come in", + "is_associative": 2 + }, + { + "index": 134, + "is_switchable": 1, + "sentence": "Sam took French classes from Adam, because [he] was eager to speak it fluently.", + "answer1": "Adam", + "answer0": "Sam", + "sentence_switched": "Adam took french classes from sam, because [he] was eager to speak it fluently.", + "correct_answer": "Sam", + "relational_word": "take classes from:eager known to speak it fluently", + "is_associative": 0 + }, + { + "index": 135, + "is_switchable": 1, + "sentence": "Sam took French classes from Adam, because [he] was known to speak it fluently.", + "answer1": "Adam", + "answer0": "Sam", + "sentence_switched": "Adam took french classes from sam, because [he] was known to speak it fluently.", + "correct_answer": "Adam", + "relational_word": "take classes from:eager known to speak it fluently", + "is_associative": 0 + }, + { + "index": 136, + "is_switchable": 0, + "sentence": "The path to the lake was blocked, so we couldn't use [it] .", + "answer1": "The lake", + "answer0": "The path", + "sentence_switched": "The lake to the path was blocked, so we couldn't use [it] .", + "correct_answer": "The path", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 137, + "is_switchable": 0, + "sentence": "The path to the lake was blocked, so we couldn't reach [it] .", + "answer1": "The lake", + "answer0": "The path", + "sentence_switched": "The lake to the path was blocked, so we couldn't reach [it] .", + "correct_answer": "The lake", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 138, + "is_switchable": 0, + "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, [it] was out.", + "answer1": "The cloud", + "answer0": "The sun", + "sentence_switched": "The cloud was covered by a thick sun all morning, but luckily, by the time the picnic started, [it] was out.", + "correct_answer": "The sun", + "relational_word": "cover:out gone", + "is_associative": 1 + }, + { + "index": 139, + "is_switchable": 0, + "sentence": "The sun was covered by a thick cloud all morning, but luckily, by the time the picnic started, [it] was gone.", + "answer1": "The cloud", + "answer0": "The sun", + "sentence_switched": "The cloud was covered by a thick sun all morning, but luckily, by the time the picnic started, [it] was gone.", + "correct_answer": "The cloud", + "relational_word": "cover:out gone", + "is_associative": 2 + }, + { + "index": 140, + "is_switchable": 0, + "sentence": "We went to the lake, because a shark had been seen at the ocean beach, so [it] was a safer place to swim.", + "answer1": "The ocean beach", + "answer0": "The lake", + "sentence_switched": "We went to the ocean beach, because a shark had been seen at the lake, so [it] was a safer place to swim.", + "correct_answer": "The lake", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 141, + "is_switchable": 0, + "sentence": "We went to the lake, because a shark had been seen at the ocean beach, so [it] was a dangerous place to swim.", + "answer1": "The ocean beach", + "answer0": "The lake", + "sentence_switched": "We went to the ocean beach, because a shark had been seen at the lake, so [it] was a dangerous place to swim.", + "correct_answer": "The ocean beach", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 142, + "is_switchable": 0, + "sentence": "Sam tried to paint a 
picture of shepherds with sheep, but [they] ended up looking more like golfers.", + "answer1": "The sheep", + "answer0": "The shepherds", + "sentence_switched": "Sam tried to paint a picture of sheep with shepherds, but [they] ended up looking more like golfers.", + "correct_answer": "The shepherds", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 143, + "is_switchable": 0, + "sentence": "Sam tried to paint a picture of shepherds with sheep, but [they] ended up looking more like dogs.", + "answer1": "The sheep", + "answer0": "The shepherds", + "sentence_switched": "Sam tried to paint a picture of sheep with shepherds, but [they] ended up looking more like dogs.", + "correct_answer": "The sheep", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 144, + "is_switchable": 0, + "sentence": "Mary tucked her daughter Anne into bed, so that [she] could work.", + "answer1": "Mary's daughter", + "answer0": "Mary", + "sentence_switched": "Mary's daughter tucked Mary into bed, so that [she] could work.", + "correct_answer": "Mary", + "relational_word": "tuck:work sleep", + "is_associative": 0 + }, + { + "index": 145, + "is_switchable": 0, + "sentence": "Mary tucked her daughter Anne into bed, so that [she] could sleep.", + "answer1": "Mary's daughter", + "answer0": "Mary", + "sentence_switched": "Mary's daughter tucked Mary into bed, so that [she] could sleep.", + "correct_answer": "Mary's daughter", + "relational_word": "tuck:work sleep", + "is_associative": 0 + }, + { + "index": 146, + "is_switchable": 0, + "sentence": "Fred and Alice had very warm down coats, but [they] were not prepared for the cold in Alaska.", + "answer1": "coats", + "answer0": "Fred and Alice", + "sentence_switched": "Coats had very warm down fred and alice, but [they] were not prepared for the cold in alaska.", + "correct_answer": "Fred and Alice", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 147, + "is_switchable": 0, + "sentence": "Fred and Alice had very warm down coats, but [they] were not enough for the cold in Alaska.", + "answer1": "coats", + "answer0": "Fred and Alice", + "sentence_switched": "Coats had very warm down fred and alice, but [they] were not enough for the cold in alaska.", + "correct_answer": "coats", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 148, + "is_switchable": 1, + "sentence": "Thomson visited Cooper's grave in 1765. At that date [he] had been travelling for five years.", + "answer1": "Cooper", + "answer0": "Thomson", + "sentence_switched": "Cooper visited thomson's grave in 1765. at that date [he] had been travelling for five years.", + "correct_answer": "Thomson", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 149, + "is_switchable": 1, + "sentence": "Thomson visited Cooper's grave in 1765. At that date [he] had been dead for five years.", + "answer1": "Cooper", + "answer0": "Thomson", + "sentence_switched": "Cooper visited thomson's grave in 1765. 
at that date [he] had been dead for five years.", + "correct_answer": "Cooper", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 150, + "is_switchable": 1, + "sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries later.", + "answer1": "Arnold", + "answer0": "Jackson", + "sentence_switched": "Arnold was greatly influenced by jackson, though [he] lived two centuries later.", + "correct_answer": "Jackson", + "relational_word": "influence:later/earlier", + "is_associative": 0 + }, + { + "index": 151, + "is_switchable": 1, + "sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries earlier.", + "answer1": "Arnold", + "answer0": "Jackson", + "sentence_switched": "Arnold was greatly influenced by jackson, though [he] lived two centuries earlier.", + "correct_answer": "Arnold", + "relational_word": "influence:later/earlier", + "is_associative": 0 + }, + { + "index": 152, + "is_switchable": 0, + "sentence": "I can't cut that tree down with that axe; [it] is too thick.", + "answer1": "The axe", + "answer0": "The tree", + "sentence_switched": "I can't cut that axe down with that tree; [it] is too thick.", + "correct_answer": "The tree", + "relational_word": "can not cut:thick small", + "is_associative": 0 + }, + { + "index": 153, + "is_switchable": 0, + "sentence": "I can't cut that tree down with that axe; [it] is too small.", + "answer1": "The axe", + "answer0": "The tree", + "sentence_switched": "I can't cut that axe down with that tree; [it] is too small.", + "correct_answer": "The axe", + "relational_word": "can not cut:thick small", + "is_associative": 0 + }, + { + "index": 154, + "is_switchable": 0, + "sentence": "The foxes are getting in at night and attacking the chickens. I shall have to kill [them] .", + "answer1": "The chickens", + "answer0": "The foxes", + "sentence_switched": "The chickens are getting in at night and attacking the foxes. i shall have to kill [them] .", + "correct_answer": "The foxes", + "relational_word": "attack:kill guard", + "is_associative": 0 + }, + { + "index": 155, + "is_switchable": 0, + "sentence": "The foxes are getting in at night and attacking the chickens. I shall have to guard [them] .", + "answer1": "The chickens", + "answer0": "The foxes", + "sentence_switched": "The chickens are getting in at night and attacking the foxes. i shall have to guard [them] .", + "correct_answer": "The chickens", + "relational_word": "attack:kill guard", + "is_associative": 1 + }, + { + "index": 156, + "is_switchable": 0, + "sentence": "The foxes are getting in at night and attacking the chickens. [They] have gotten very bold.", + "answer1": "The chickens", + "answer0": "The foxes", + "sentence_switched": "The chickens are getting in at night and attacking the foxes. [they] have gotten very bold.", + "correct_answer": "The foxes", + "relational_word": "attack:bold nervous", + "is_associative": 0 + }, + { + "index": 157, + "is_switchable": 0, + "sentence": "The foxes are getting in at night and attacking the chickens. [They] have gotten very nervous.", + "answer1": "The chickens", + "answer0": "The foxes", + "sentence_switched": "The chickens are getting in at night and attacking the foxes. [they] have gotten very nervous.", + "correct_answer": "The chickens", + "relational_word": "attack:bold nervous", + "is_associative": 0 + }, + { + "index": 158, + "is_switchable": 0, + "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. 
He opened [them] when the wind stopped.", + "answer1": "His hands", + "answer0": "His eyes", + "sentence_switched": "Fred covered his hands with his eyes, because the wind was blowing sand around. he opened [them] when the wind stopped.", + "correct_answer": "His eyes", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 159, + "is_switchable": 0, + "sentence": "Fred covered his eyes with his hands, because the wind was blowing sand around. He lowered [them] when the wind stopped.", + "answer1": "His hands", + "answer0": "His eyes", + "sentence_switched": "Fred covered his hands with his eyes, because the wind was blowing sand around. he lowered [them] when the wind stopped.", + "correct_answer": "His hands", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 160, + "is_switchable": 1, + "sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.", + "answer1": "Tina", + "answer0": "Terpsichore", + "sentence_switched": "The actress used to be named tina, but she changed it to terpsichore a few years ago, because she figured [it] was too hard to pronounce.", + "correct_answer": "Terpsichore", + "relational_word": "change:hard/easy", + "is_associative": 0 + }, + { + "index": 161, + "is_switchable": 1, + "sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was easier to pronounce.", + "answer1": "Tina", + "answer0": "Terpsichore", + "sentence_switched": "The actress used to be named tina, but she changed it to terpsichore a few years ago, because she figured [it] was easier to pronounce.", + "correct_answer": "Tina", + "relational_word": "change:hard/easy", + "is_associative": 0 + }, + { + "index": 162, + "is_switchable": 1, + "sentence": "Fred watched TV while George went out to buy groceries. After an hour [he] got up.", + "answer1": "George", + "answer0": "Fred", + "sentence_switched": "George watched tv while fred went out to buy groceries. after an hour [he] got up.", + "correct_answer": "Fred", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 163, + "is_switchable": 1, + "sentence": "Fred watched TV while George went out to buy groceries. After an hour [he] got back.", + "answer1": "George", + "answer0": "Fred", + "sentence_switched": "George watched tv while fred went out to buy groceries. after an hour [he] got back.", + "correct_answer": "George", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 164, + "is_switchable": 0, + "sentence": "Fred was supposed to run the dishwasher, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned [it] on.", + "answer1": "The TV", + "answer0": "The dishwasher", + "sentence_switched": "Fred was supposed to run the tv, but he put it off, because he wanted to watch dishwasher. but the show turned out to be boring, so he changed his mind and turned [it] on.", + "correct_answer": "The dishwasher", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 165, + "is_switchable": 0, + "sentence": "Fred was supposed to run the dishwasher, but he put it off, because he wanted to watch TV. But the show turned out to be boring, so he changed his mind and turned [it] off.", + "answer1": "The TV", + "answer0": "The dishwasher", + "sentence_switched": "Fred was supposed to run the tv, but he put it off, because he wanted to watch dishwasher. 
but the show turned out to be boring, so he changed his mind and turned [it] off.", + "correct_answer": "The TV", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 166, + "is_switchable": 0, + "sentence": "Fred is the only man still alive who remembers my great-grandfather. [He] is a remarkable man.", + "answer1": "My great-grandfather", + "answer0": "Fred", + "sentence_switched": "My great-grandfather is the only man still alive who remembers fred. [he] is a remarkable man.", + "correct_answer": "Fred", + "relational_word": "alive:is/was", + "is_associative": 0 + }, + { + "index": 167, + "is_switchable": 0, + "sentence": "Fred is the only man still alive who remembers my great-grandfather. [He] was a remarkable man.", + "answer1": "My great-grandfather", + "answer0": "Fred", + "sentence_switched": "My great-grandfather is the only man still alive who remembers fred. [he] was a remarkable man.", + "correct_answer": "My great-grandfather", + "relational_word": "alive:is/was", + "is_associative": 0 + }, + { + "index": 168, + "is_switchable": 0, + "sentence": "Fred is the only man alive who still remembers my father as an infant. When Fred first saw my father, [he] was twelve years old.", + "answer1": "My father", + "answer0": "Fred", + "sentence_switched": "My father is the only man alive who still remembers fred as an infant. when my father first saw fred, [he] was twelve years old.", + "correct_answer": "Fred", + "relational_word": "infant:twelve years old twelve months old", + "is_associative": 0 + }, + { + "index": 169, + "is_switchable": 0, + "sentence": "Fred is the only man alive who still remembers my father as an infant. When Fred first saw my father, [he] was twelve months old.", + "answer1": "My father", + "answer0": "Fred", + "sentence_switched": "My father is the only man alive who still remembers fred as an infant. when my father first saw fred, [he] was twelve months old.", + "correct_answer": "My father", + "relational_word": "infant:twelve years old twelve months old", + "is_associative": 0 + }, + { + "index": 170, + "is_switchable": 1, + "sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.", + "answer1": "Yakutsk", + "answer0": "Kamchatka", + "sentence_switched": "In july, Yakutsk declared war on kamchatka. since kamchatka's army was much better equipped and ten times larger, [they] were defeated within weeks.", + "correct_answer": "Kamchatka", + "relational_word": "better equipped and large:defeated/victorious", + "is_associative": 0 + }, + { + "index": 171, + "is_switchable": 1, + "sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were victorious within weeks.", + "answer1": "Yakutsk", + "answer0": "Kamchatka", + "sentence_switched": "In july, Yakutsk declared war on kamchatka. since kamchatka's army was much better equipped and ten times larger, [they] were victorious within weeks.", + "correct_answer": "Yakutsk", + "relational_word": "better equipped and large:defeated/victorious", + "is_associative": 0 + }, + { + "index": 172, + "is_switchable": 0, + "sentence": "Look! There is a minnow swimming right below that duck! [It] had better get away to safety fast!", + "answer1": "The duck", + "answer0": "The minnow", + "sentence_switched": "Look! there is a duck swimming right below that minnow! 
[it] had better get away to safety fast!", + "correct_answer": "The minnow", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 173, + "is_switchable": 0, + "sentence": "Look! There is a shark swimming right below that duck! [It] had better get away to safety fast!", + "answer1": "The duck", + "answer0": "The shark", + "sentence_switched": "Look! there is a duck swimming right below that shark! [it] had better get away to safety fast!", + "correct_answer": "The duck", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 174, + "is_switchable": 0, + "sentence": "Archaeologists have concluded that humans lived in Laputa 20,000 years ago. [They] hunted for evidence on the river banks.", + "answer1": "Prehistoric humans", + "answer0": "Archaeologists", + "sentence_switched": "Prehistoric humans have concluded that humans lived in laputa 20,000 years ago. [they] hunted for evidence on the river banks.", + "correct_answer": "Archaeologists", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 175, + "is_switchable": 0, + "sentence": "Archaeologists have concluded that humans lived in Laputa 20,000 years ago. [They] hunted for deer on the river banks.", + "answer1": "Prehistoric humans", + "answer0": "Archaeologists", + "sentence_switched": "Prehistoric humans have concluded that humans lived in laputa 20,000 years ago. [they] hunted for deer on the river banks.", + "correct_answer": "Prehistoric humans", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 176, + "is_switchable": 0, + "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. [They] began two years ago.", + "answer1": "The fish", + "answer0": "The scientists", + "sentence_switched": "The fish are studying three species of scientists that have recently been found living in the indian ocean. [they] began two years ago.", + "correct_answer": "The scientists", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 177, + "is_switchable": 0, + "sentence": "The scientists are studying three species of fish that have recently been found living in the Indian Ocean. [They] appeared two years ago.", + "answer1": "The fish", + "answer0": "The scientists", + "sentence_switched": "The fish are studying three species of scientists that have recently been found living in the indian ocean. [they] appeared two years ago.", + "correct_answer": "The fish", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 178, + "is_switchable": 0, + "sentence": "The journalists interviewed the stars of the new movie. [They] were very persistent, so the interview lasted for a long time.", + "answer1": "The stars", + "answer0": "The journalists", + "sentence_switched": "The stars interviewed the journalists of the new movie. [they] were very persistent, so the interview lasted for a long time.", + "correct_answer": "The journalists", + "relational_word": "interview:persistent cooperative", + "is_associative": 0 + }, + { + "index": 179, + "is_switchable": 0, + "sentence": "The journalists interviewed the stars of the new movie. [They] were very cooperative, so the interview lasted for a long time.", + "answer1": "The stars", + "answer0": "The journalists", + "sentence_switched": "The stars interviewed the journalists of the new movie. 
[they] were very cooperative, so the interview lasted for a long time.", + "correct_answer": "The stars", + "relational_word": "interview:persistent cooperative", + "is_associative": 0 + }, + { + "index": 180, + "is_switchable": 0, + "sentence": "The police arrested all of the gang members. [They] were trying to stop the drug trade in the neighborhood.", + "answer1": "The gang members", + "answer0": "The police", + "sentence_switched": "The gang members arrested all of the police. [they] were trying to stop the drug trade in the neighborhood.", + "correct_answer": "The police", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 181, + "is_switchable": 0, + "sentence": "The police arrested all of the gang members. [They] were trying to run the drug trade in the neighborhood.", + "answer1": "The gang members", + "answer0": "The police", + "sentence_switched": "The gang members arrested all of the police. [they] were trying to run the drug trade in the neighborhood.", + "correct_answer": "The gang members", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 182, + "is_switchable": 0, + "sentence": "I put the cake away in the refrigerator. [It] has a lot of butter in it.", + "answer1": "The refrigerator", + "answer0": "The cake", + "sentence_switched": "I put the refrigerator away in the cake. [it] has a lot of butter in it.", + "correct_answer": "The cake", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 183, + "is_switchable": 0, + "sentence": "I put the cake away in the refrigerator. [It] has a lot of leftovers in it.", + "answer1": "The refrigerator", + "answer0": "The cake", + "sentence_switched": "I put the refrigerator away in the cake. [it] has a lot of leftovers in it.", + "correct_answer": "The refrigerator", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 184, + "is_switchable": 0, + "sentence": "Sam broke both his ankles and he's walking with crutches. But a month or so from now [they] should be better.", + "answer1": "The crutches", + "answer0": "The ankles", + "sentence_switched": "Sam broke both his crutches and he's walking with ankles. but a month or so from now [they] should be better.", + "correct_answer": "The ankles", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 185, + "is_switchable": 0, + "sentence": "Sam broke both his ankles and he's walking with crutches. But a month or so from now [they] should be unnecessary.", + "answer1": "The crutches", + "answer0": "The ankles", + "sentence_switched": "Sam broke both his crutches and he's walking with ankles. but a month or so from now [they] should be unnecessary.", + "correct_answer": "The crutches", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 186, + "is_switchable": 0, + "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority.", + "answer1": "The opponents", + "answer0": "The sponsors", + "sentence_switched": "When the opponents of the bill got to the town hall, they were surprised to find that the room was full of sponsors. [they] were very much in the minority.", + "correct_answer": "The sponsors", + "relational_word": "be full of:minority/majority", + "is_associative": 0 + }, + { + "index": 187, + "is_switchable": 0, + "sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. 
[They] were very much in the majority.", + "answer1": "The opponents", + "answer0": "The sponsors", + "sentence_switched": "When the opponents of the bill got to the town hall, they were surprised to find that the room was full of sponsors. [they] were very much in the majority.", + "correct_answer": "The opponents", + "relational_word": "be full of:minority/majority", + "is_associative": 0 + }, + { + "index": 188, + "is_switchable": 1, + "sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .", + "answer1": "The chocolate chip cookies", + "answer0": "The oatmeal cookies", + "sentence_switched": "Everyone really loved the chocolate chip cookies; only a few people liked the oatmeal cookies. next time, we should make more of [them] .", + "correct_answer": "The oatmeal cookies", + "relational_word": "like over:more/fewer", + "is_associative": 0 + }, + { + "index": 189, + "is_switchable": 1, + "sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make fewer of [them] .", + "answer1": "The chocolate chip cookies", + "answer0": "The oatmeal cookies", + "sentence_switched": "Everyone really loved the chocolate chip cookies; only a few people liked the oatmeal cookies. next time, we should make fewer of [them] .", + "correct_answer": "The chocolate chip cookies", + "relational_word": "like over:more/fewer", + "is_associative": 0 + }, + { + "index": 190, + "is_switchable": 0, + "sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .", + "answer1": "chairs", + "answer0": "copies of the newsletter", + "sentence_switched": "We had hoped to place chairs on all the copies of the newsletter in the auditorium, but there were simply not enough of [them] .", + "correct_answer": "copies of the newsletter", + "relational_word": "place on all:not enough/too many", + "is_associative": 0 + }, + { + "index": 191, + "is_switchable": 0, + "sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply too many of [them] .", + "answer1": "chairs", + "answer0": "copies of the newsletter", + "sentence_switched": "We had hoped to place chairs on all the copies of the newsletter in the auditorium, but there were simply too many of [them] .", + "correct_answer": "chairs", + "relational_word": "place on all:not enough/too many", + "is_associative": 0 + }, + { + "index": 192, + "is_switchable": 0, + "sentence": "I stuck a pin through a carrot. When I pulled the pin out, [it] left a hole.", + "answer1": "The carrot", + "answer0": "The pin", + "sentence_switched": "I stuck a carrot through a pin. when i pulled the carrot out, [it] left a hole.", + "correct_answer": "The pin", + "relational_word": "stick:leave have", + "is_associative": 1 + }, + { + "index": 193, + "is_switchable": 0, + "sentence": "I stuck a pin through a carrot. When I pulled the pin out, [it] had a hole.", + "answer1": "The carrot", + "answer0": "The pin", + "sentence_switched": "I stuck a carrot through a pin. when i pulled the carrot out, [it] had a hole.", + "correct_answer": "The carrot", + "relational_word": "stick:leave have", + "is_associative": 2 + }, + { + "index": 194, + "is_switchable": 0, + "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. 
But that turned out to be a bad idea, because [it] got full of coffee.", + "answer1": "The coffee", + "answer0": "The pen", + "sentence_switched": "I couldn't find a spoon, so i tried using a coffee to stir my pen. but that turned out to be a bad idea, because [it] got full of pen.", + "correct_answer": "The pen", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 195, + "is_switchable": 0, + "sentence": "I couldn't find a spoon, so I tried using a pen to stir my coffee. But that turned out to be a bad idea, because [it] got full of ink.", + "answer1": "The coffee", + "answer0": "The pen", + "sentence_switched": "I couldn't find a spoon, so i tried using a coffee to stir my pen. but that turned out to be a bad idea, because [it] got full of ink.", + "correct_answer": "The coffee", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 196, + "is_switchable": 1, + "sentence": "Steve follows Fred's example in everything. [He] admires him hugely.", + "answer1": "Fred", + "answer0": "Steve", + "sentence_switched": "Fred follows steve's example in everything. [he] admires him hugely.", + "correct_answer": "Steve", + "relational_word": "follow:admire/influence", + "is_associative": 2 + }, + { + "index": 197, + "is_switchable": 1, + "sentence": "Steve follows Fred's example in everything. [He] influences him hugely.", + "answer1": "Fred", + "answer0": "Steve", + "sentence_switched": "Fred follows steve's example in everything. [he] influences him hugely.", + "correct_answer": "Fred", + "relational_word": "follow:admire/influence", + "is_associative": 2 + }, + { + "index": 198, + "is_switchable": 0, + "sentence": "The table won't fit through the doorway because [it] is too wide.", + "answer1": "The doorway", + "answer0": "The table", + "sentence_switched": "The doorway won't fit through the table because [it] is too wide.", + "correct_answer": "The table", + "relational_word": "fit through:wide/narrow", + "is_associative": 0 + }, + { + "index": 199, + "is_switchable": 0, + "sentence": "The table won't fit through the doorway because [it] is too narrow.", + "answer1": "The doorway", + "answer0": "The table", + "sentence_switched": "The doorway won't fit through the table because [it] is too narrow.", + "correct_answer": "The doorway", + "relational_word": "fit through:wide/narrow", + "is_associative": 0 + }, + { + "index": 200, + "is_switchable": 1, + "sentence": "Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.", + "answer1": "The jacket", + "answer0": "The sweater", + "sentence_switched": "Grace was happy to trade me her jacket for my sweater. she thinks [it] looks dowdy on her.", + "correct_answer": "The sweater", + "relational_word": "trade:dowdy/great", + "is_associative": 0 + }, + { + "index": 201, + "is_switchable": 1, + "sentence": "Grace was happy to trade me her sweater for my jacket. She thinks [it] looks great on her.", + "answer1": "The jacket", + "answer0": "The sweater", + "sentence_switched": "Grace was happy to trade me her jacket for my sweater. 
she thinks [it] looks great on her.", + "correct_answer": "The jacket", + "relational_word": "trade:dowdy/great", + "is_associative": 0 + }, + { + "index": 202, + "is_switchable": 1, + "sentence": "John hired Bill to take care of [him] .", + "answer1": "Bill", + "answer0": "John", + "sentence_switched": "Bill hired john to take care of [him] .", + "correct_answer": "John", + "relational_word": "hire/hire oneself to:take care of", + "is_associative": 0 + }, + { + "index": 203, + "is_switchable": 1, + "sentence": "John hired himself out to Bill to take care of [him] .", + "answer1": "Bill", + "answer0": "John", + "sentence_switched": "Bill hired himself out to john to take care of [him] .", + "correct_answer": "Bill", + "relational_word": "hire/hire oneself to:take care of", + "is_associative": 0 + }, + { + "index": 204, + "is_switchable": 1, + "sentence": "John promised Bill to leave, so an hour later [he] left.", + "answer1": "Bill", + "answer0": "John", + "sentence_switched": "Bill promised john to leave, so an hour later [he] left.", + "correct_answer": "John", + "relational_word": "promise/order", + "is_associative": 0 + }, + { + "index": 205, + "is_switchable": 1, + "sentence": "John ordered Bill to leave, so an hour later [he] left.", + "answer1": "Bill", + "answer0": "John", + "sentence_switched": "Bill ordered john to leave, so an hour later [he] left.", + "correct_answer": "Bill", + "relational_word": "promise/order", + "is_associative": 0 + }, + { + "index": 206, + "is_switchable": 1, + "sentence": "Sam Goodman's biography of the Spartan general Xenophanes conveys a vivid sense of the difficulties [he] faced in his research.", + "answer1": "Xenophanes", + "answer0": "Goodman", + "sentence_switched": "Sam xenophanes's biography of the spartan general goodman conveys a vivid sense of the difficulties [he] faced in his research.", + "correct_answer": "Goodman", + "relational_word": "none", + "is_associative": 2 + }, + { + "index": 207, + "is_switchable": 1, + "sentence": "Sam Goodman's biography of the Spartan general Xenophanes conveys a vivid sense of the difficulties [he] faced in his childhood.", + "answer1": "Xenophanes", + "answer0": "Goodman", + "sentence_switched": "Sam xenophanes's biography of the spartan general goodman conveys a vivid sense of the difficulties [he] faced in his childhood.", + "correct_answer": "Xenophanes", + "relational_word": "none", + "is_associative": 2 + }, + { + "index": 208, + "is_switchable": 0, + "sentence": "Emma's mother had died long ago, and [her] education had been managed by an excellent woman as governess.", + "answer1": "Emma's mother", + "answer0": "Emma", + "sentence_switched": "Emma had died long ago, and [her] education had been managed by an excellent woman as governess.", + "correct_answer": "Emma", + "relational_word": "mother:education place", + "is_associative": 0 + }, + { + "index": 209, + "is_switchable": 0, + "sentence": "Emma's mother had died long ago, and [her] place had been taken by an excellent woman as governess.", + "answer1": "Emma's mother", + "answer0": "Emma", + "sentence_switched": "Emma had died long ago, and [her] place had been taken by an excellent woman as governess.", + "correct_answer": "Emma's mother", + "relational_word": "mother:education place", + "is_associative": 0 + }, + { + "index": 210, + "is_switchable": 1, + "sentence": "Jane knocked on Susan's door but [she] did not get an answer.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on jane's door but [she] did not 
get an answer.", + "correct_answer": "Jane", + "relational_word": "knock:get an answer/answer", + "is_associative": 0 + }, + { + "index": 211, + "is_switchable": 1, + "sentence": "Jane knocked on Susan's door but [she] did not answer.", + "answer1": "Susan", + "answer0": "Jane", + "sentence_switched": "Susan knocked on jane's door but [she] did not answer.", + "correct_answer": "Susan", + "relational_word": "knock:get an answer/answer", + "is_associative": 0 + }, + { + "index": 212, + "is_switchable": 0, + "sentence": "Joe paid the detective after [he] received the final report on the case.", + "answer1": "the detective", + "answer0": "Joe", + "sentence_switched": "The detective paid joe after [he] received the final report on the case.", + "correct_answer": "Joe", + "relational_word": "pay:receive/deliver", + "is_associative": 0 + }, + { + "index": 213, + "is_switchable": 0, + "sentence": "Joe paid the detective after [he] delivered the final report on the case.", + "answer1": "the detective", + "answer0": "Joe", + "sentence_switched": "The detective paid joe after [he] delivered the final report on the case.", + "correct_answer": "the detective", + "relational_word": "pay:receive/deliver", + "is_associative": 0 + }, + { + "index": 214, + "is_switchable": 1, + "sentence": "Beth didn't get angry with Sally, who had cut her off, because [she] stopped and counted to ten.", + "answer1": "Sally", + "answer0": "Beth", + "sentence_switched": "Sally didn't get angry with beth, who had cut her off, because [she] stopped and counted to ten.", + "correct_answer": "Beth", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 215, + "is_switchable": 1, + "sentence": "Beth didn't get angry with Sally, who had cut her off, because [she] stopped and apologized.", + "answer1": "Sally", + "answer0": "Beth", + "sentence_switched": "Sally didn't get angry with beth, who had cut her off, because [she] stopped and apologized.", + "correct_answer": "Sally", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 216, + "is_switchable": 0, + "sentence": "Jim signaled the barman and gestured toward [his] empty glass", + "answer1": "The barman", + "answer0": "Jim", + "sentence_switched": "The barman signaled jim and gestured toward [his] empty glass", + "correct_answer": "Jim", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 217, + "is_switchable": 0, + "sentence": "Jim signaled the barman and gestured toward [his] bathroom key.", + "answer1": "The barman", + "answer0": "Jim", + "sentence_switched": "The barman signaled jim and gestured toward [his] bathroom key.", + "correct_answer": "The barman", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 218, + "is_switchable": 1, + "sentence": "Dan took the rear seat while Bill claimed the front because [his] \"Dibs!\" was slow.", + "answer1": "Bill", + "answer0": "Dan", + "sentence_switched": "Bill took the rear seat while dan claimed the front because [his] \"dibs!\" was slow.", + "correct_answer": "Dan", + "relational_word": "?", + "is_associative": 0 + }, + { + "index": 219, + "is_switchable": 1, + "sentence": "Dan took the rear seat while Bill claimed the front because [his] \"Dibs!\" was quicker.", + "answer1": "Bill", + "answer0": "Dan", + "sentence_switched": "Bill took the rear seat while dan claimed the front because [his] \"dibs!\" was quicker.", + "correct_answer": "Bill", + "relational_word": "?", + "is_associative": 0 + }, + { + "index": 220, + "is_switchable": 1, + "sentence": 
"Tom said \"Check\" to Ralph as he moved [his] bishop.", + "answer1": "Ralph", + "answer0": "Tom", + "sentence_switched": "Ralph said \"check\" to tom as he moved [his] bishop.", + "correct_answer": "Tom", + "relational_word": "say check:move take", + "is_associative": 0 + }, + { + "index": 221, + "is_switchable": 1, + "sentence": "Tom said \"Check\" to Ralph as he took [his] bishop.", + "answer1": "Ralph", + "answer0": "Tom", + "sentence_switched": "Ralph said \"check\" to tom as he took [his] bishop.", + "correct_answer": "Ralph", + "relational_word": "say check:move take", + "is_associative": 0 + }, + { + "index": 222, + "is_switchable": 1, + "sentence": "As Andrea in the crop duster passed over Susan, [she] could see the landing strip.", + "answer1": "Susan", + "answer0": "Andrea", + "sentence_switched": "As susan in the crop duster passed over andrea, [she] could see the landing strip.", + "correct_answer": "Andrea", + "relational_word": "?", + "is_associative": 0 + }, + { + "index": 223, + "is_switchable": 1, + "sentence": "As Andrea in the crop duster passed over Susan, [she] could see the landing gear.", + "answer1": "Susan", + "answer0": "Andrea", + "sentence_switched": "As susan in the crop duster passed over andrea, [she] could see the landing gear.", + "correct_answer": "Susan", + "relational_word": "?", + "is_associative": 0 + }, + { + "index": 224, + "is_switchable": 1, + "sentence": "Tom gave Ralph a lift to school so [he] wouldn't have to drive alone.", + "answer1": "Ralph", + "answer0": "Tom", + "sentence_switched": "Ralph gave tom a lift to school so [he] wouldn't have to drive alone.", + "correct_answer": "Tom", + "relational_word": "give a life:drive alone walk", + "is_associative": 0 + }, + { + "index": 225, + "is_switchable": 1, + "sentence": "Tom gave Ralph a lift to school so [he] wouldn't have to walk.", + "answer1": "Ralph", + "answer0": "Tom", + "sentence_switched": "Ralph gave tom a lift to school so [he] wouldn't have to walk.", + "correct_answer": "Ralph", + "relational_word": "give a life:drive alone walk", + "is_associative": 0 + }, + { + "index": 226, + "is_switchable": 1, + "sentence": "Bill passed the half-empty plate to John because [he] was full.", + "answer1": "John", + "answer0": "Bill", + "sentence_switched": "John passed the half-empty plate to bill because [he] was full.", + "correct_answer": "Bill", + "relational_word": "pass the plate:full/hungry", + "is_associative": 0 + }, + { + "index": 227, + "is_switchable": 1, + "sentence": "Bill passed the half-empty plate to John because [he] was hungry.", + "answer1": "John", + "answer0": "Bill", + "sentence_switched": "John passed the half-empty plate to bill because [he] was hungry.", + "correct_answer": "John", + "relational_word": "pass the plate:full/hungry", + "is_associative": 0 + }, + { + "index": 228, + "is_switchable": 1, + "sentence": "Bill passed the gameboy to John because [his] turn was over.", + "answer1": "John", + "answer0": "Bill", + "sentence_switched": "John passed the gameboy to bill because [his] turn was over.", + "correct_answer": "Bill", + "relational_word": "pass:turn over turn next", + "is_associative": 0 + }, + { + "index": 229, + "is_switchable": 1, + "sentence": "Bill passed the gameboy to John because [his] turn was next.", + "answer1": "John", + "answer0": "Bill", + "sentence_switched": "John passed the gameboy to bill because [his] turn was next.", + "correct_answer": "John", + "relational_word": "pass:turn over turn next", + "is_associative": 0 + }, + { + "index": 230, + 
"is_switchable": 1, + "sentence": "The man lifted the boy onto [his] shoulders.", + "answer1": "The boy", + "answer0": "The man", + "sentence_switched": "The boy lifted the man onto [his] shoulders.", + "correct_answer": "The man", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 231, + "is_switchable": 1, + "sentence": "The man lifted the boy onto [his] bunk bed.", + "answer1": "The boy", + "answer0": "The man", + "sentence_switched": "The boy lifted the man onto [his] bunk bed.", + "correct_answer": "The boy", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 232, + "is_switchable": 1, + "sentence": "Stretching [her] back, the woman smiled at the girl.", + "answer1": "The girl", + "answer0": "The woman", + "sentence_switched": "Stretching [her] back, the girl smiled at the woman.", + "correct_answer": "The woman", + "relational_word": "stretch pat", + "is_associative": 0 + }, + { + "index": 233, + "is_switchable": 1, + "sentence": "Patting [her] back, the woman smiled at the girl.", + "answer1": "The girl", + "answer0": "The woman", + "sentence_switched": "Patting [her] back, the girl smiled at the woman.", + "correct_answer": "The girl", + "relational_word": "stretch pat", + "is_associative": 0 + }, + { + "index": 234, + "is_switchable": 1, + "sentence": "Billy cried because Toby wouldn't accept [his] toy.", + "answer1": "Toby", + "answer0": "Billy", + "sentence_switched": "Toby cried because billy wouldn't accept [his] toy.", + "correct_answer": "Billy", + "relational_word": "accept share", + "is_associative": 0 + }, + { + "index": 235, + "is_switchable": 1, + "sentence": "Billy cried because Toby wouldn't share [his] toy.", + "answer1": "Toby", + "answer0": "Billy", + "sentence_switched": "Toby cried because billy wouldn't share [his] toy.", + "correct_answer": "Toby", + "relational_word": "accept share", + "is_associative": 0 + }, + { + "index": 236, + "is_switchable": 1, + "sentence": "Lily spoke to Donna, breaking [her] silence.", + "answer1": "Donna", + "answer0": "Lily", + "sentence_switched": "Donna spoke to lily, breaking [her] silence.", + "correct_answer": "Lily", + "relational_word": "speak:break silence break concentration", + "is_associative": 0 + }, + { + "index": 237, + "is_switchable": 1, + "sentence": "Lily spoke to Donna, breaking [her] concentration.", + "answer1": "Donna", + "answer0": "Lily", + "sentence_switched": "Donna spoke to lily, breaking [her] concentration.", + "correct_answer": "Donna", + "relational_word": "speak:break silence break concentration", + "is_associative": 0 + }, + { + "index": 238, + "is_switchable": 1, + "sentence": "When Tommy dropped his ice cream, Timmy giggled, so father gave [him] a sympathetic look.", + "answer1": "Timmy", + "answer0": "Tommy", + "sentence_switched": "When timmy dropped his ice cream, tommy giggled, so father gave [him] a sympathetic look.", + "correct_answer": "Tommy", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 239, + "is_switchable": 1, + "sentence": "When Tommy dropped his ice cream, Timmy giggled, so father gave [him] a stern look.", + "answer1": "Timmy", + "answer0": "Tommy", + "sentence_switched": "When timmy dropped his ice cream, tommy giggled, so father gave [him] a stern look.", + "correct_answer": "Timmy", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 240, + "is_switchable": 1, + "sentence": "As Ollie carried Tommy up the long winding steps, [his] legs ached.", + "answer1": "Tommy", + "answer0": "Ollie", + 
"sentence_switched": "As tommy carried ollie up the long winding steps, [his] legs ached.", + "correct_answer": "Ollie", + "relational_word": "carry:leg ache leg dangle", + "is_associative": 0 + }, + { + "index": 241, + "is_switchable": 1, + "sentence": "As Ollie carried Tommy up the long winding steps, [his] legs dangled.", + "answer1": "Tommy", + "answer0": "Ollie", + "sentence_switched": "As tommy carried ollie up the long winding steps, [his] legs dangled.", + "correct_answer": "Tommy", + "relational_word": "carry:leg ache leg dangle", + "is_associative": 0 + }, + { + "index": 242, + "is_switchable": 0, + "sentence": "The father carried the sleeping boy in [his] arms", + "answer1": "The boy", + "answer0": "The father", + "sentence_switched": "The boy carried the sleeping father in [his] arms", + "correct_answer": "The father", + "relational_word": "carry:in arms in bassinet", + "is_associative": 0 + }, + { + "index": 243, + "is_switchable": 0, + "sentence": "The father carried the sleeping boy in [his] bassinet.", + "answer1": "The boy", + "answer0": "The father", + "sentence_switched": "The boy carried the sleeping father in [his] bassinet.", + "correct_answer": "The boy", + "relational_word": "carry:in arms in bassinet", + "is_associative": 0 + }, + { + "index": 244, + "is_switchable": 1, + "sentence": "The woman held the girl against [her] chest", + "answer1": "The girl", + "answer0": "The woman", + "sentence_switched": "The girl held the woman against [her] chest", + "correct_answer": "The woman", + "relational_word": "hold:against chest against will", + "is_associative": 0 + }, + { + "index": 245, + "is_switchable": 1, + "sentence": "The woman held the girl against [her] will.", + "answer1": "The girl", + "answer0": "The woman", + "sentence_switched": "The girl held the woman against [her] will.", + "correct_answer": "The girl", + "relational_word": "hold:against chest against will", + "is_associative": 0 + }, + { + "index": 246, + "is_switchable": 0, + "sentence": "Pam's parents came home and found her having sex with her boyfriend, Paul. [They] were furious about it.", + "answer1": "Pam and Paul", + "answer0": "Pam's parents", + "sentence_switched": "Pam and paul came home and found Pam's parents having sex. [they] were furious about it.", + "correct_answer": "Pam's parents", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 247, + "is_switchable": 0, + "sentence": "Pam's parents came home and found her having sex with her boyfriend, Paul. [They] were embarrassed about it.", + "answer1": "Pam and Paul", + "answer0": "Pam's parents", + "sentence_switched": "Pam and paul came home and found Pam's parents having sex. [they] were embarrassed about it.", + "correct_answer": "Pam and Paul", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 248, + "is_switchable": 0, + "sentence": "Dr. Adams informed Kate that [she] had retired and presented several options for future treatment.", + "answer1": "Kate", + "answer0": "Dr. Adams", + "sentence_switched": "Kate informed dr. adams that [she] had retired and presented several options for future treatment.", + "correct_answer": "Dr. Adams", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 249, + "is_switchable": 0, + "sentence": "Dr. Adams informed Kate that [she] had cancer and presented several options for future treatment.", + "answer1": "Kate", + "answer0": "Dr. Adams", + "sentence_switched": "Kate informed dr. 
adams that [she] had cancer and presented several options for future treatment.", + "correct_answer": "Kate", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 250, + "is_switchable": 1, + "sentence": "Dan had to stop Bill from toying with the injured bird. [He] is very compassionate.", + "answer1": "Bill", + "answer0": "Dan", + "sentence_switched": "Bill had to stop dan from toying with the injured bird. [he] is very compassionate.", + "correct_answer": "Dan", + "relational_word": "stop", + "is_associative": 0 + }, + { + "index": 251, + "is_switchable": 1, + "sentence": "Dan had to stop Bill from toying with the injured bird. [He] is very cruel.", + "answer1": "Bill", + "answer0": "Dan", + "sentence_switched": "Bill had to stop dan from toying with the injured bird. [he] is very cruel.", + "correct_answer": "Bill", + "relational_word": "stop", + "is_associative": 0 + }, + { + "index": 252, + "is_switchable": 1, + "sentence": "George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.", + "answer1": "Eric", + "answer0": "George", + "sentence_switched": "Eric got free tickets to the play, but he gave them to george, even though [he] was particularly eager to see it.", + "correct_answer": "George", + "relational_word": "even though/because/not", + "is_associative": 0 + }, + { + "index": 253, + "is_switchable": 1, + "sentence": "George got free tickets to the play, but he gave them to Eric, because [he] was particularly eager to see it.", + "answer1": "Eric", + "answer0": "George", + "sentence_switched": "Eric got free tickets to the play, but he gave them to george, because [he] was particularly eager to see it.", + "correct_answer": "Eric", + "relational_word": "even though/because/not", + "is_associative": 0 + }, + { + "index": 254, + "is_switchable": 1, + "sentence": "George got free tickets to the play, but he gave them to Eric, because [he] was not particularly eager to see it.", + "answer1": "Eric", + "answer0": "George", + "sentence_switched": "Eric got free tickets to the play, but he gave them to george, because [he] was not particularly eager to see it.", + "correct_answer": "George", + "relational_word": "even though/because/not", + "is_associative": 0 + }, + { + "index": 255, + "is_switchable": 1, + "sentence": "Jane gave Joan candy because [she] wasn't hungry.", + "answer1": "Joan", + "answer0": "Jane", + "sentence_switched": "Joan gave jane candy because [she] wasn't hungry.", + "correct_answer": "Jane", + "relational_word": "give:not hungry/hungry", + "is_associative": 0 + }, + { + "index": 256, + "is_switchable": 1, + "sentence": "Jane gave Joan candy because [she] was hungry.", + "answer1": "Joan", + "answer0": "Jane", + "sentence_switched": "Joan gave jane candy because [she] was hungry.", + "correct_answer": "Joan", + "relational_word": "give:not hungry/hungry", + "is_associative": 0 + }, + { + "index": 257, + "is_switchable": 0, + "sentence": "I tried to paint a picture of an orchard, with lemons in the lemon trees, but [they] came out looking more like light bulbs.", + "answer1": "lemon trees", + "answer0": "lemons", + "sentence_switched": "I tried to paint a picture of an orchard, with lemon trees in the lemons, but [they] came out looking more like light bulbs.", + "correct_answer": "lemons", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 258, + "is_switchable": 0, + "sentence": "I tried to paint a picture of an orchard, with lemons in the lemon trees, but [they] came 
out looking more like telephone poles.", + "answer1": "lemon trees", + "answer0": "lemons", + "sentence_switched": "I tried to paint a picture of an orchard, with lemon trees in the lemons, but [they] came out looking more like telephone poles.", + "correct_answer": "lemon trees", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 259, + "is_switchable": 1, + "sentence": "James asked Robert for a favor but [he] was refused.", + "answer1": "Robert", + "answer0": "James", + "sentence_switched": "Robert asked james for a favor but [he] was refused.", + "correct_answer": "James", + "relational_word": "ask for a favor:refuse/be refused`", + "is_associative": 0 + }, + { + "index": 260, + "is_switchable": 1, + "sentence": "James asked Robert for a favor but [he] refused.", + "answer1": "Robert", + "answer0": "James", + "sentence_switched": "Robert asked james for a favor but [he] refused.", + "correct_answer": "Robert", + "relational_word": "ask for a favor:refuse/be refused`", + "is_associative": 0 + }, + { + "index": 261, + "is_switchable": 1, + "sentence": "Kirilov ceded the presidency to Shatov because [he] was less popular.", + "answer1": "Shatov", + "answer0": "Kirilov", + "sentence_switched": "Shatov ceded the presidency to kirilov because [he] was less popular.", + "correct_answer": "Kirilov", + "relational_word": "cede:less popular/more popular", + "is_associative": 0 + }, + { + "index": 262, + "is_switchable": 1, + "sentence": "Kirilov ceded the presidency to Shatov because [he] was more popular.", + "answer1": "Shatov", + "answer0": "Kirilov", + "sentence_switched": "Shatov ceded the presidency to kirilov because [he] was more popular.", + "correct_answer": "Shatov", + "relational_word": "cede:less popular/more popular", + "is_associative": 0 + }, + { + "index": 263, + "is_switchable": 1, + "sentence": "Emma did not pass the ball to Janie although [she] saw that she was open.", + "answer1": "Janie", + "answer0": "Emma", + "sentence_switched": "Janie did not pass the ball to emma although [she] saw that she was open.", + "correct_answer": "Emma", + "relational_word": "not pass although:see open/open", + "is_associative": 0 + }, + { + "index": 264, + "is_switchable": 1, + "sentence": "Emma did not pass the ball to Janie although [she] was open.", + "answer1": "Janie", + "answer0": "Emma", + "sentence_switched": "Janie did not pass the ball to emma although [she] was open.", + "correct_answer": "Janie", + "relational_word": "not pass although:see open/open", + "is_associative": 0 + }, + { + "index": 265, + "is_switchable": 0, + "sentence": "I put the butterfly wing on the table and [it] broke.", + "answer1": "The table", + "answer0": "The butterfly wing", + "sentence_switched": "I put the table on the butterfly wing and [it] broke.", + "correct_answer": "The butterfly wing", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 266, + "is_switchable": 0, + "sentence": "I put the heavy book on the table and [it] broke.", + "answer1": "The table", + "answer0": "The heavy book", + "sentence_switched": "I put the table on the heavy book and [it] broke.", + "correct_answer": "The table", + "relational_word": "none", + "is_associative": 1 + }, + { + "index": 267, + "is_switchable": 0, + "sentence": "Madonna fired her trainer because [she] couldn't stand her boyfriend.", + "answer1": "The trainer", + "answer0": "Madonna", + "sentence_switched": "The trainer fired madonna because [she] couldn't stand her boyfriend.", + "correct_answer": "Madonna", + 
"relational_word": "none", + "is_associative": 0 + }, + { + "index": 268, + "is_switchable": 0, + "sentence": "Madonna fired her trainer because [she] slept with her boyfriend.", + "answer1": "The trainer", + "answer0": "Madonna", + "sentence_switched": "The trainer fired madonna because [she] slept with her boyfriend.", + "correct_answer": "The trainer", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 269, + "is_switchable": 0, + "sentence": "Madonna fired her trainer because she slept with [her] boyfriend.", + "answer1": "The trainer", + "answer0": "Madonna", + "sentence_switched": "The trainer fired madonna because she slept with [her] boyfriend.", + "correct_answer": "Madonna", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 270, + "is_switchable": 0, + "sentence": "Madonna fired her trainer because she couldn't stand [her] boyfriend.", + "answer1": "The trainer", + "answer0": "Madonna", + "sentence_switched": "The trainer fired madonna because she couldn't stand [her] boyfriend.", + "correct_answer": "The trainer", + "relational_word": "none", + "is_associative": 0 + }, + { + "index": 271, + "is_switchable": 1, + "sentence": "Carol believed that Rebecca suspected that [she] had stolen the watch.", + "answer1": "Rebecca", + "answer0": "Carol", + "sentence_switched": "Rebecca believed that carol suspected that [she] had stolen the watch.", + "correct_answer": "Carol", + "relational_word": "suspect regret", + "is_associative": 2 + }, + { + "index": 272, + "is_switchable": 1, + "sentence": "Carol believed that Rebecca regretted that [she] had stolen the watch.", + "answer1": "Rebecca", + "answer0": "Carol", + "sentence_switched": "Rebecca believed that carol regretted that [she] had stolen the watch.", + "correct_answer": "Rebecca", + "relational_word": "suspect regret", + "is_associative": 2 + } +] diff --git a/bin/pytorch_pretrained_bert b/bin/pytorch_pretrained_bert deleted file mode 100644 index eee2b4c250c962..00000000000000 --- a/bin/pytorch_pretrained_bert +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -python -m pytorch_pretrained_bert "$@" \ No newline at end of file diff --git a/child_frames.py b/child_frames.py new file mode 100644 index 00000000000000..0650898fa0d7bc --- /dev/null +++ b/child_frames.py @@ -0,0 +1,307 @@ +frames = \ +[ + { + "index": 2, + "orig_sentence": "The trophy doesn't fit into the brown suitcase because [it] is too large/small.", + "entities": ["trophy", "suitcase"], + "entity_substitutes": [["ball", "toy"], ["bag", "box"]], + "determiner": "the", + "packed_relations": ["doesn't fit into/can fit into", "doesn't hold/can hold"], + "packed_relation_substitutes": [["can't be put into/can be put into"], ["doesn't have enough room for/has enough room for"]], + "packed_predicates": ["is large/isn't large", "is small/isn't small"], + }, + { + "index": 4, + "orig_sentence": "Joan made sure to thank Susan for all the help [she] had recieved/given.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Tiffany"]], + "packed_relations": ["thanked/didn't thank", "took good care of/didn't good care of"], + "packed_relation_substitutes": [["felt grateful to/didn't feel grateful to"], ["was appreciated by/wasn't appreciated by"]], + "packed_predicates": ["had received a lot of help/hadn't received a lot of help", "had given a lot of help/hadn't given a lot of help"], + "predicate_dichotomy": False, + }, + { + "index": 4000, + "orig_sentence": "John gave a lot of money to Susan because [he] was 
very rich/poor.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Linda"]], + "packed_relations": ["gave a lot of money to/didn't give a lot of money to", "received a lot of money from/didn't receive a lot of money from"], + "packed_relation_substitutes": [["subsidized/didn't subsidize"], ["borrowed a lot of money from/didn't borrow any money from"]], + "packed_predicates": ["was rich/wasn't rich", "was poor/wasn't poor"], + }, + { + "index": 10, + "orig_sentence": "The delivery truck zoomed by the school bus because [it] was going so fast/slow.", + "entities": ["truck", "bus"], + "entity_substitutes": [["car", "ambulance"], ["bicycle", "tram"]], + "determiner": "the", + "packed_relations": ["overtook/couldn't overtake", "fell far behind/didn't fall far behind"], + "packed_relation_substitutes": [["zoomed by/didn't pass"], ["was left behind/wasn't left far behind"]], + "packed_predicates": ["was going fast/wasn't going fast", "was going slow/wasn't going slow"], + }, + ## didn't defeated, replace error: didn't defeat -> defeated + { + "index": 12, + "orig_sentence": "Frank felt vindicated/crushed when his longtime rival Bill revealed that [he] was the winner of the competition.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Linda"]], + "packed_relations": ["beat/didn't beat", "lost to/didn't lose to"], + "packed_relation_substitutes": [["defeated/didn't defeat"], ["was defeated by/wasn't defeated by"]], + "relation_suffix": "in the game", + "packed_predicates": ["was happy/wasn't happy", "was sad/wasn't sad"], + "reverse_causal": True + }, + { + "index": 16, + "orig_sentence": "The large ball crashed right through the table because [it] was made of steel/styrofoam.", + "entities": ["ball", "board"], + "entity_substitutes": [["bullet", "arrow"], ["shield", "disk"]], + "determiner": "the", + "packed_relations": ["crashed right through/didn't crash through", "failed to block/blocked"], + "packed_relation_substitutes": [["penetrated through/didn't penetrate through"], ["failed to stop/stopped"]], + "packed_predicates": ["was hard/wasn't hard", "was soft/wasn't soft"], + }, + { + "index": 18, + "orig_sentence": "John couldn't see the stage with Billy in front of him because [he] is so short.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Edward"], ["Betty", "Donna"]], + "packed_relations": ["couldn't see the stage behind/could see the stage behind", "blocked the view of/didn't block the view of"], + "packed_relation_substitutes": [["couldn't catch sight of the stage behind/could catch sight of the stage behind"], ["obstructed the sight of/didn't obstruct the sight of"]], + "packed_predicates": ["is short/isn't short", "is tall/isn't tall"], + }, + { + "index": 20, + "orig_sentence": "Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.", + "entities": ["Brian", "Amy"], + "entity_substitutes": [["Charles", "Paul"], ["Emma", "Linda"]], + "packed_relations": ["threw the schoolbag down to/threw the schoolbag up to", "caught the schoolbag thrown down by/caught the schoolbag thrown up by"], + "packed_relation_substitutes": [["cast the schoolbag down to/cast the schoolbag up to"], ["took the schoolbag thrown down by/took the schoolbag thrown up by"]], + "packed_predicates": ["reached the top of the stairs", "reached the bottom of the stairs"], + "conjunctions": [["after", ], ["before", ]] + }, + ## didn't defeated, replace error: didn't defeat -> defeated + { + 
"index": 22, + "orig_sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.", + "entities": ["Tom", "Sue"], + "entity_substitutes": [["John", "David"], ["Sally", "Susan"]], + "packed_relations": ["beat/didn't beat", "lost to/didn't lose to"], + "packed_relation_substitutes": [["defeated/didn't defeat"], ["was defeated by/wasn't defeated by"]], + "relation_prefix": "Running at about the same speed,", + "relation_suffix": "in the running race", + "packed_predicates": ["had a good start/didn't have a good start", "had a bad start/didn't have a bad start"], + }, + { + "index": 28, + "orig_sentence": "Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.", + "entities": ["Anna", "Andy"], + "entity_substitutes": [["Lucy", "Nancy"], ["George", "Frank"]], + "packed_relations": ["did better than/didn't do better than", "did worse than/didn't do worse than"], + "packed_relation_substitutes": [["performed better than/didn't perform better than"], ["performed worse than/didn't perform worse than"]], + "relation_suffix": "on the test", + "packed_predicates": ["had studied hard/hadn't studied hard", "was lazy in doing homework/wasn't lazy in doing homework"], + }, + { + "index": 30, + "orig_sentence": "The firemen arrived after the police because [they] were coming from so far away.", + "entities": ["doctor", "police"], + "entity_substitutes": [["worker", "employee"], ["boss", "administrator"]], + "determiner": "the", + "packed_relations": ["arrived after/didn't arrive after", "arrived before/didn't arrive before"], + "packed_relation_substitutes": [["reached here after/didn't reach here after"], ["reached here before/didn't reach here before"]], + "packed_predicates": ["came from far away/didn't come from far away", "came from a close place/didn't come from a close place"], + }, + { + "index": 32000, + "orig_sentence": "Frank was upset with Tom because the toaster [he] had bought from him didn't work.", + "entities": ["Betty", "Henry"], + "entity_substitutes": [["Amy", "Linda"], ["Bush", "Frank"]], + "packed_relations": ["was upset with/was pleased with", "was hated by/was loved by"], + "packed_relation_substitutes": [["hated/liked"], ["was disliked by/was liked by"]], + "packed_predicates": ["had bought didn't work/had bought worked well", "had sold didn't work/had sold worked well"], + "predicate_prefix": "the toaster", + "predicate_dichotomy": False, + }, + { + "index": 36, + "orig_sentence": "The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first", + "entities": ["potatoes", "flour"], + "entity_substitutes": [["candy", "rice"], ["beans", "noodles"]], + "determiner": "the bag of", + "packed_relations": ["had been placed above/hadn't been placed above", "had been placed below/hadn't been placed below"], + "packed_relation_substitutes": [["had been put above/hadn't been put above"], ["had been put below/hadn't been put below"]], + "packed_predicates": ["had to be moved first/couldn't be moved first", "had to be moved later/couldn't be moved later"], + "reverse_causal": True + }, + { + "index": 38, + "orig_sentence": "Pete envies Martin although [he] is very successful.", + "entities": ["Peter", "Mandy"], + "entity_substitutes": [["Martin", "Paul"], ["Cindy", "Emma"]], + "packed_relations": ["envied/didn't envy", "was envied by/wasn't envied by"], + "packed_relation_substitutes": [["was jealous of/wasn't jealous of"], ["was admired by/wasn't admired by"]], + 
"packed_predicates": ["failed/didn't fail", "was successful/wasn't successful"], + }, + { + "index": 42, + "orig_sentence": "I poured water from the bottle into the cup until [it] was empty.", + "entities": ["bottle", "cup"], + "entity_substitutes": [["bowl", "bucket"], ["tube", "container"]], + "determiner": "the", + "packed_relations": ["was filled with water from/leaked into", "leaked into/was filled with water from"], + "packed_relation_substitutes": [["was suffused with water from/dripped water into"], ["dripped water into/was suffused with water from"]], + "packed_predicates": ["was empty", "was full"], + "conjunctions": [["after", ], ["before", ]] + }, + { + "index": 50, + "orig_sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.", + "entities": ["Joe", "Amy"], + "entity_substitutes": [["David", "Charles"], ["Betty", "Cindy"]], + "packed_relations": ["can beat/can't beat", "often loses to/seldom loses to"], + "packed_relation_substitutes": [["can defeat/can't defeat"], ["is often defeated by/is seldom defeated by"]], + "relation_suffix": "at tennis", + "packed_predicates": ["is older/isn't older", "is younger/isn't younger"], + }, + { + "index": 68, + "orig_sentence": "Ann asked Mary what time the library closes, because [she] had forgotten.", + "entities": ["Ann", "Henry"], + "entity_substitutes": [["Mary", "Linda"], ["Brian", "Michael"]], + "packed_relations": ["asked/didn't ask", "told/didn't tell"], + "packed_relation_substitutes": [["was told by/wasn't told by"], ["was asked by/wasn't asked by"]], + "relation_suffix": "what time the library closes", + "packed_predicates": ["had forgotten/hadn't forgotten", "remembered/didn't remember"], + }, + { + "index": 84, + "orig_sentence": "If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.", + "entities": ["Sam", "Emma"], + "entity_substitutes": [["Paul", "Bush"], ["Susan", "Lucy"]], + "packed_relations": ["succeeded in fooling/failed to fool", "was fooled by/wasn't fooled by"], + "packed_relation_substitutes": [["succeeded in cheating/failed to cheat"], ["was cheated by/wasn't cheated by"]], + "packed_predicates": ["got a lot of money/didn't get a lot of money", "lost a lot of money/didn't lose a lot of money"], + "predicate_dichotomy": False, + "reverse_causal": True + }, + { + "index": 15000, + "orig_sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries later.", + "entities": ["Jack", "Betty"], + "entity_substitutes": [["Tom", "Jay"], ["Emily", "Helen"]], + "packed_relations": ["always takes care of/dosen't take care of", "is always taken care of by/isn't taken care of by"], + "packed_relation_substitutes": [["always looks after/dosen't look after"], ["always needs the help of/didn't need the help of"]], + "packed_predicates": ["is older/isn't older", "is younger/isn't younger"], + }, + { + "index": 160, + "orig_sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.", + "entities": ["Betty", "Adele"], + "entity_substitutes": [["Amy", "Cindy"], ["Alberta", "Caroline"]], + "packed_relations": ["replaced/didn't replace", "was changed to/wasn't changed to"], + "packed_relation_substitutes": [["was substituted for/wasn't substituted for"], ["was replaced by/wasn't replaced by"]], + "relation_suffix": "as the actress's new name", + "packed_predicates": ["is easy to pronounce/isn't easy to pronounce", "is hard to pronounce/isn't hard to 
pronounce"], + }, + { + "index": 1700000, + "orig_sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.", + "entities": ["Germany", "Italy"], + "entity_substitutes": [["Australia", "Japan"], ["Argentina", "Canada"]], + "packed_relations": ["defeated/didn't defeat", "was defeated by/wasn't defeated by"], + "packed_relation_substitutes": [["conquered/didn't conquer"], ["was conquered by/wasn't conquered by"]], + "packed_predicates": ["was more powerful/wasn't more powerful", "was less powerful/wasn't less powerful"], + }, + { + "index": 186, + "orig_sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority", + "entities": ["sponsors", "opponents"], + "entity_substitutes": [["workers", "customers"], ["teachers", "students"]], + "determiner": "the", + "packed_relations": ["were less in number than/were not less in number than", "were more in number than/were not more in number than"], + "packed_relation_substitutes": [["were outnumbered by/were not outnumbered by"], ["outnumbered/didn't outnumber"]], + "packed_predicates": ["were in the minority/were not in the minority", "were in the majority/were not in the majority"], + "reverse_causal": True + }, + { + "index": 188, + "orig_sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .", + "entities": ["cookies", "chips"], + "entity_substitutes": [["apples", "bananas"], ["grapes", "sandwiches"]], + "determiner": "the", + "packed_relations": ["are more popular than/are less popular than", "lose to/don't lose to"], + "packed_relation_substitutes": [["are sold more than/are sold less than"], ["are not as popular as/are as popular as"]], + "packed_predicates": ["should be made more next time/shouldn't be made more next time", "should be made less next time/shouldn't be made less next time"], + "reverse_causal": True + }, + { + "index": 190, + "orig_sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .", + "entities": ["newspapers", "chairs"], + "entity_substitutes": [["cups", "pictures"], ["tables", "benches"]], + "determiner": "the", + "packed_relations": ["could be placed on all/couldn't be placed on all", "could all be covered by/couldn't all be covered by"], + "packed_relation_substitutes": [["could be put on all/couldn't be put on all"], ["could carry all/couldn't carry all"]], + "packed_predicates": ["there were many of/there were not many of", "there were few of/there were not few of"], + "prepositive_pred": True, + }, + { + "index": 19600, + "orig_sentence": "Steve follows Fred's example in everything. 
[He] admires him hugely.", + "entities": ["Steve", "Lucy"], + "entity_substitutes": [["Fred", "George"], ["Lily", "Wendy"]], + "packed_relations": ["follows/doesn't follow", "is followed by/isn't followed by"], + "packed_relation_substitutes": [["imitates/doesn't imitate"], ["is imitated by/isn't imitated by"]], + "relation_suffix": "in everything", + "packed_predicates": ["is bad at making decisions/isn't bad at making decisions", "is good at making decisions/isn't good at making decisions"], + }, + { + "index": 198, + "orig_sentence": "The table won't fit through the doorway because [it] is too wide.", + "entities": ["table", "doorway"], + "entity_substitutes": [["desk", "sofa"], ["corridor", "hallway"]], + "determiner": "the", + "packed_relations": ["will fit through/won't fit through", "will be fitted through by/won't be fitted through by"], + "packed_relation_substitutes": [["will pass through/won't pass through"], ["will be passed through by/won't be passed through by"]], + "packed_predicates": ["is narrow/isn't narrow", "is wide/isn't wide"], + }, + { + "index": 2000000, + "orig_sentence": "Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.", + "entities": ["sweater", "jacket"], + "entity_substitutes": [["skirt", "cap"], ["hat", "short"]], + "determiner": "the", + "packed_relations": ["is traded by Grace for/isn't traded by Grace for", "is substituted by Grace for/isn't substituted by Grace for"], + "packed_relation_substitutes": [["is replaced by Grace with/isn't replaced by Grace with"], ["is preferred by Grace to/isn't preferred by Grace to"]], + "packed_predicates": ["looks bad/looks not bad", "looks good/looks not good"], + "predicate_prefix": "she thinks", + }, + { + "index": 226, + "orig_sentence": "Bill passed the half-empty plate to John because [he] was full.", + "entities": ["Bill", "Amy"], + "entity_substitutes": [["Brian", "David"], ["Emma", "Helen"]], + "packed_relations": ["passed the half-empty plate to/didn't pass the half-empty plate to", "received the half-empty plate from/didn't receive the half-empty plate from"], + "packed_relation_substitutes": [["gave the half-empty plate to/didn't give the half-empty plate to"], ["took the half-empty plate from/didn't take the half-empty plate from"]], + "packed_predicates": ["was full/wasn't full", "was hungry/wasn't hungry"], + }, + { + "index": 252, + "orig_sentence": "George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.", + "entities": ["George", "Linda"], + "entity_substitutes": [["Eric", "Ted"], ["Cindy", "Lucy"]], + "packed_relations": ["gave the tickets of the play to/didn't give the tickets of the play to", "received the tickets of the play from/didn't receive the tickets of the play from"], + "packed_relation_substitutes": [["sent the tickets of the play to/didn't send the tickets of the play to"], ["took the tickets of the play from/didn't take the tickets of the play from"]], + "packed_predicates": ["wasn't interested in it/was interested in it", "was eager to see it/wasn't eager to see it"], + }, + { + "index": 261, + "orig_sentence": "Kirilov ceded the presidency to Shatov because [he] was less popular.", + "entities": ["James", "Amy"], + "entity_substitutes": [["Robert", "Jack"], ["Donna", "Emily"]], + "packed_relations": ["ceded the presidency to/didn't cede the presidency to", "took over the presidency from/didn't take over the presidency from"], + "packed_relation_substitutes": [["gave the presidency 
to/didn't give the presidency to"], ["got the presidency from/didn't get the presidency from"]], + "packed_predicates": ["was notorious/was not notorious", "was popular/wasn't popular"], + }, +] diff --git a/child_generator.py b/child_generator.py new file mode 100644 index 00000000000000..04b1c150b6b65c --- /dev/null +++ b/child_generator.py @@ -0,0 +1,139 @@ +import os +import json +import itertools +from itertools import product, permutations +from random import sample + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from child_lib import * + + +BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased' +tokenizer = BertTokenizer.from_pretrained('/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt') + + +def assert_in_bert_vocab(tokens): + for token in tokens: + if isinstance(token, str): # entities + assert token.lower() in tokenizer.vocab, token + '->' + str(tokenizer.tokenize(token)) + elif isinstance(token, tuple): # relations + assert len(token) == 2, str(token) + for rel in token: + rel = rel.split('..')[0] + assert rel in tokenizer.vocab, rel + '->' + str(tokenizer.tokenize(rel)) + + +male_names = ['James', 'John', 'Robert', ]#'Michael', 'David', 'Paul', 'Jeff', 'Daniel', 'Charles', 'Thomas'] +female_names = ['Mary', 'Linda', 'Jennifer', ]#'Maria', 'Susan', 'Lisa', 'Sandra', 'Barbara', 'Patricia', 'Elizabeth'] +people_names = (male_names, female_names) +assert_in_bert_vocab(male_names) +assert_in_bert_vocab(female_names) + +people_adj_relations = ( + ('taller..than', 'shorter..than'), +# ('thinner..than', 'fatter..than'), # fatter not in BERT vocab + ('younger..than', 'older..than'), +# ('stronger..than', 'weaker..than'), +# ('faster..than', 'slower..than'), +# ('richer..than', 'poorer..than') +) + +rel2entypes = { +# spatial_relations: [fruits, animals, people_names], + people_adj_relations: [people_names], +# animal_adj_relations: [animals], +# object_adj_relations: [fruits, animals] +} + + +def comparative2superlative(comparative_form, structured=False): + assert comparative_form.endswith('er'), comparative_form + superlative_form = 'the ' + comparative_form[:-2] + 'est' \ + if not structured else 'the ' + comparative_form + ' st' + return superlative_form + + +def make_relational_atoms(relational_template, entities, relations): + neg_relations = ["isn't " + r for r in relations] + relations = ["is " + r for r in relations] + atoms = [relational_template.format(ent0=ent0, ent1=ent1, rel=rel) + for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]] + atoms += [relational_template.format(ent0=ent0, ent1=ent1, rel=rel) + for ent0, ent1, rel in [entities + reverse(neg_relations)[:1], reverse(entities) + neg_relations[:1]]] + return atoms + + +transitive_P_template = '{ent0} {rel} {ent1} .' +transitive_wh_QA_template = '{which} is {pred} ? {ent} .' +transitive_yesno_QA_template = 'is {ent0} {rel} {ent1} ? {ans} .' 
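+
+# Illustrative sketch (comment only): with entities ('James', 'John', 'Robert') and the relation pair
+# ('taller..than', 'shorter..than') defined above, make_transitive() below is expected to join two
+# premise atoms and one question/answer pair with a ' ||| ' separator, yielding lines roughly of the form
+#   "James is taller than John . John is taller than Robert . ||| is James taller than Robert ? [yes] ."
+# The bracketed token marks the answer slot that is later replaced by [MASK] for masked-LM fine-tuning.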
+ +def make_transitive(P_template, wh_QA_template, yesno_QA_template, join_template, + index=-1, orig_sentence='', entities=["John", "Mary", "Susan"], entity_substitutes=None, determiner="", + relations=('taller..than', 'shorter..than'), maybe=True, structured=False, + packed_predicates=["pred0/~pred0", "pred1/~pred1"], predicate_substitutes=None, + predicate_dichotomy=True, reverse_causal=False): + if entities[0].islower(): + entities = ['the ' + e for e in entities] +# print('relations =', relations) + relations, predicates = ([r.replace('..', ' ') for r in relations], [r.split('..')[0] for r in relations]) \ + if '..' in relations[0] else ([r.split('/')[0] for r in relations], [r.split('/')[-1] for r in relations]) +# print('relations =', relations, 'predicates =', predicates) + predicates = [comparative2superlative(p, structured=structured) for p in predicates] + + P0_entities, P1_entities = ([entities[0], entities[1]], [entities[1], entities[2]]) \ + if not maybe else ([entities[0], entities[1]], [entities[0], entities[2]]) + P0 = make_relational_atoms(P_template, P0_entities, relations) + P1 = make_relational_atoms(P_template, P1_entities, relations) + + wh_pronoun = 'which' if entities[0].startswith('the') else 'who' + wh_QA = [wh_QA_template.format(which=wh_pronoun, pred=pred, ent=ent) + for pred, ent in [(predicates[0], mask(entities[0])), (predicates[-1], mask(entities[-1] if not maybe else 'unknown'))]] + + def _maybe(s): + return s if not maybe else 'maybe' + yesno_entities = (entities[0], entities[-1]) if not maybe else (entities[1], entities[-1]) + yesno_QA = [yesno_QA_template.format(ent0=ent0, ent1=ent1, rel=rel, ans=ans) + for ent0, ent1, rel, ans in [ + (yesno_entities[0], yesno_entities[-1], relations[0], mask(_maybe('yes'))), + (yesno_entities[0], yesno_entities[-1], relations[-1], mask(_maybe('no'))), + (yesno_entities[-1], yesno_entities[0], relations[-1], mask(_maybe('yes'))), + (yesno_entities[-1], yesno_entities[0], relations[0], mask(_maybe('no')))]] + + Ps = [(p0, p1) for p0, p1 in list(product(P0, P1)) + list(product(P1, P0))] + QAs = wh_QA + yesno_QA + + def get_rel(atom): + for rel in relations: +# assert rel.startswith('is') + rel = rel.split()[0] # "taller than" -> "taller" + if rel in atom: + return rel + assert False + sentences = [p0 + ' ' + p1 + ' ||| ' + qas for (p0, p1), qas in product(Ps, QAs) + if not structured or get_rel(p0) == get_rel(p1) == get_rel(qas)] +# sentences = [s.replace('er st ', 'est ') for s in sentences] + return sentences + + +def make_sentences(maybe=True, structured=False): + sentence_groups = [] + maybe = False + for relations, entity_types in rel2entypes.items(): + sentences = [] + ent_tuples = [] + for entities in entity_types: + if isinstance(entities, list): + ent_tuples += permutations(entities, 3) + else: + assert isinstance(entities, tuple) and len(entities) == 2 # people_names + ent_tuples += permutations(entities[0] + entities[1], 3) + for (rel, ent_tuple) in product(relations, ent_tuples): + sentences += make_transitive(transitive_P_template, transitive_wh_QA_template, transitive_yesno_QA_template, None, + entities=list(ent_tuple), relations=rel, maybe=False, structured=True) + if maybe: + sentences += make_transitive(transitive_P_template, transitive_wh_QA_template, transitive_yesno_QA_template, None, + entities=list(ent_tuple), relations=rel, maybe=True, structured=True) + # sample(sentences, 20) + # logger.info('num_sent = %d -> %d' % (len(sentences), len(set(sentences)))) + sentence_groups.append(sentences) + return 
sentences diff --git a/child_lib.py b/child_lib.py new file mode 100644 index 00000000000000..8f6112bfdb06ed --- /dev/null +++ b/child_lib.py @@ -0,0 +1,13 @@ +def reverse(l): + return list(reversed(l)) if isinstance(l, list) else tuple(reversed(l)) + + +def mask(ent_str): + tokens = ent_str.strip().split() + if len(tokens) == 1: + return '[%s]' % tokens[0] + elif len(tokens) == 2: + assert tokens[0] == 'the', ent_str + return '%s [%s]' % (tokens[0], tokens[1]) + else: + assert False, ent_str diff --git a/child_wsc_generator.py b/child_wsc_generator.py new file mode 100644 index 00000000000000..e247529c747c22 --- /dev/null +++ b/child_wsc_generator.py @@ -0,0 +1,126 @@ +import os +import json +import itertools +from itertools import product, chain + +from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig +from child_frames import frames + + +CONFIG_NAME = 'bert_config.json' +BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/' +#tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt')) +tokenizer = BertTokenizer.from_pretrained('/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt') + +A_template = "{rel_prefix} {dt} {ent0} {rel} {dt} {ent1} {rel_suffix}" +B_templates = ["{pred_prefix} {dt} {ent} {pred}", "{pred_prefix} {pred} {dt} {ent}"] + +# causal_templates = [["{A} because {B}."],# "{B} so {A}."], +# ["{A} so {B}."],# "{B} because {A}."] +# ] +# turning_templates = [["{A} although {B}."],# "{B} but {A}."], +# ["{A} but {B}."],# "{B} although {A}."] +# ] + +causal_templates = [["{A} ||| {conj} {B}."],# "{B} so {A}."], + ["{A} ||| {conj} {B}."],# "{B} because {A}."] + ] +turning_templates = [["{A} ||| {conj} {B}."],# "{B} but {A}."], + ["{A} ||| {conj} {B}."],# "{B} although {A}."] + ] + + +def reverse(l): + return list(reversed(l)) + + +def mask(ent_str): + tokens = ent_str.strip().split() + if len(tokens) == 1: + return '[%s]' % tokens[0] + elif len(tokens) == 2: + assert tokens[0] == 'the', ent_str + return '%s [%s]' % (tokens[0], tokens[1]) + else: + assert False, ent_str + + +def make_sentences(index=-1, orig_sentence='', entities=["John", "Mary"], entity_substitutes=None, determiner="", + packed_relations=["rel/~rel", "rev_rel/~rev_rel"], packed_relation_substitutes=None, + relation_prefix="", relation_suffix="", + packed_predicates=["pred0/~pred0", "pred1/~pred1"], predicate_prefix="", prepositive_pred=False, + predicate_dichotomy=True, reverse_causal=False, conjunctions=[["because", "so"], ["although", "but"]]): + assert entities[0].lower() in tokenizer.vocab , entities[0] + assert entities[1].lower() in tokenizer.vocab , entities[1] + + def form_As(packed_rels): + relations, neg_relations = zip(*[rel.split("/") for rel in packed_rels]) + relations, neg_relations = list(relations), list(neg_relations) + + As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) + for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]] + negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) + for ent0, ent1, rel in [entities + neg_relations[:1], reverse(entities) + reverse(neg_relations)[:1]]] + return As, negAs + + As, negAs = form_As(packed_relations) + + substituted_As, substituted_negAs = [], [] + for packed_rel_subs in zip(*packed_relation_substitutes): + subs_As, subs_negAs = 
form_As(packed_rel_subs) + substituted_As += subs_As + substituted_negAs += subs_negAs + + if "/" in packed_predicates[0]: + predicates, neg_predicates = zip(*[pred.split("/") for pred in packed_predicates]) + predicates, neg_predicates = list(predicates), list(neg_predicates) + else: + predicates, neg_predicates = packed_predicates, [] + + B_template = B_templates[int(prepositive_pred)] + Bs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, predicates)] + negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, neg_predicates)] + if predicate_dichotomy: + Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, reversed(neg_predicates))] + negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, reversed(predicates))] + + def form_sentences(sentence_template, As, Bs, conj): + return [" ".join(sentence_template.format(A=A, B=B, conj=conj).split()) for A, B in product(As, Bs)] + + def form_all_sentences(As, negAs, Bs, negBs): + causal_sentences = [] + causal_conj = conjunctions[0][int(reverse_causal)] + for causal_template in causal_templates[int(reverse_causal)]: + for A, B in [(As, Bs), (negAs, negBs)]: + causal_sentences += form_sentences(causal_template, A, B, causal_conj) + + turning_sentences = [] + turning_conj = conjunctions[1][int(reverse_causal)] + for turning_template in turning_templates[int(reverse_causal)]: + for A, B in [(As, negBs), (negAs, Bs)]: + turning_sentences += form_sentences(turning_template, A, B, turning_conj) + + sentences = causal_sentences + turning_sentences + return sentences, causal_sentences, turning_sentences + + sentences, causal_sentences, turning_sentences = form_all_sentences(As, negAs, Bs, negBs) + substituted_sentences = sentences + + if packed_relation_substitutes is not None: + substituted_sentences += form_all_sentences(substituted_As, substituted_negAs, Bs, negBs)[0] + + if entity_substitutes is not None: + for sub in entity_substitutes: + for ent in sub: + assert ent.lower() in tokenizer.vocab , ent + " not in BERT vocab" + assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes + assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6 + + entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1])) + substituted_sentences = [sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) + for sent in substituted_sentences for sub in entity_substitutes] + return causal_sentences, turning_sentences, substituted_sentences diff --git a/convert_pos.py b/convert_pos.py new file mode 100644 index 00000000000000..89b7cf9f4ba31e --- /dev/null +++ b/convert_pos.py @@ -0,0 +1,64 @@ +from nltk.corpus import wordnet as wn + +# Just to make it a bit more readable +WN_NOUN = 'n' +WN_VERB = 'v' +WN_ADJECTIVE = 'a' +WN_ADJECTIVE_SATELLITE = 's' +WN_ADVERB = 'r' + +def convert(word, from_pos, to_pos): + """ Transform words given from/to POS tags """ + + synsets = wn.synsets(word, pos=from_pos) + + # Word not found + if not synsets: + return [] + + # Get all lemmas of the word (consider 'a'and 's' equivalent) + lemmas = [l for s in synsets + for l in s.lemmas() + if s.name().split('.')[1] == from_pos + or from_pos in (WN_ADJECTIVE, WN_ADJECTIVE_SATELLITE) + and 
s.name().split('.')[1] in (WN_ADJECTIVE, WN_ADJECTIVE_SATELLITE)] + + # Get related forms + derivationally_related_forms = [(l, l.derivationally_related_forms()) for l in lemmas] + + # filter only the desired pos (consider 'a' and 's' equivalent) + related_noun_lemmas = [l for drf in derivationally_related_forms + for l in drf[1] + if l.synset().name().split('.')[1] == to_pos + or to_pos in (WN_ADJECTIVE, WN_ADJECTIVE_SATELLITE) + and l.synset().name().split('.')[1] in (WN_ADJECTIVE, WN_ADJECTIVE_SATELLITE)] + + # Extract the words from the lemmas + words = [l.name() for l in related_noun_lemmas] + len_words = len(words) + + # Build the result in the form of a list containing tuples (word, probability) + result = [(w, float(words.count(w))/len_words) for w in set(words)] + result.sort(key=lambda w: -w[1]) + + # return all the possibilities sorted by probability + return result + + +print(convert("death", WN_NOUN, WN_VERB)) +# [('die', 0.75), ('end', 0.2), ('decease', 0.05)] + +print(convert("story", WN_NOUN, WN_VERB)) +# [('report', 0.2222222222222222), ('tell', 0.2222222222222222), ('narrate', 0.2222222222222222),... + +print(convert("boring", WN_ADJECTIVE, WN_NOUN)) +# [('tedium', 0.3333333333333333), ('dullness', 0.16666666666666666),... + +print(convert("trouble", WN_NOUN, WN_ADJECTIVE)) +# [('troublous', 0.6666666666666666), ('problematical', 0.3333333333333333)] + +print(convert("solve", WN_VERB, WN_ADJECTIVE_SATELLITE)) +# [('solvent', 0.5), ('workable', 0.5)] + +print(convert("think", WN_VERB, WN_ADJECTIVE)) +# [('cogitative', 0.6666666666666666), ('recollective', 0.3333333333333333)] diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000000000..e47eb548f9a13f --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,7 @@ +FROM pytorch/pytorch:latest + +RUN git clone https://github.com/NVIDIA/apex.git && cd apex && python setup.py install --cuda_ext --cpp_ext + +RUN pip install pytorch-pretrained-bert + +WORKDIR /workspace \ No newline at end of file diff --git a/examples.tar b/examples.tar new file mode 100644 index 00000000000000..03e1c603a947ce Binary files /dev/null and b/examples.tar differ diff --git a/examples/child_frames.py b/examples/child_frames.py new file mode 100644 index 00000000000000..0650898fa0d7bc --- /dev/null +++ b/examples/child_frames.py @@ -0,0 +1,307 @@ +frames = \ +[ + { + "index": 2, + "orig_sentence": "The trophy doesn't fit into the brown suitcase because [it] is too large/small.", + "entities": ["trophy", "suitcase"], + "entity_substitutes": [["ball", "toy"], ["bag", "box"]], + "determiner": "the", + "packed_relations": ["doesn't fit into/can fit into", "doesn't hold/can hold"], + "packed_relation_substitutes": [["can't be put into/can be put into"], ["doesn't have enough room for/has enough room for"]], + "packed_predicates": ["is large/isn't large", "is small/isn't small"], + }, + { + "index": 4, + "orig_sentence": "Joan made sure to thank Susan for all the help [she] had received/given.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Tiffany"]], + "packed_relations": ["thanked/didn't thank", "took good care of/didn't take good care of"], + "packed_relation_substitutes": [["felt grateful to/didn't feel grateful to"], ["was appreciated by/wasn't appreciated by"]], + "packed_predicates": ["had received a lot of help/hadn't received a lot of help", "had given a lot of help/hadn't given a lot of help"], + "predicate_dichotomy": False, + }, + { + "index": 4000, + "orig_sentence": "John gave a 
lot of money to Susan because [he] was very rich/poor.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Linda"]], + "packed_relations": ["gave a lot of money to/didn't give a lot of money to", "received a lot of money from/didn't receive a lot of money from"], + "packed_relation_substitutes": [["subsidized/didn't subsidize"], ["borrowed a lot of money from/didn't borrow any money from"]], + "packed_predicates": ["was rich/wasn't rich", "was poor/wasn't poor"], + }, + { + "index": 10, + "orig_sentence": "The delivery truck zoomed by the school bus because [it] was going so fast/slow.", + "entities": ["truck", "bus"], + "entity_substitutes": [["car", "ambulance"], ["bicycle", "tram"]], + "determiner": "the", + "packed_relations": ["overtook/couldn't overtake", "fell far behind/didn't fall far behind"], + "packed_relation_substitutes": [["zoomed by/didn't pass"], ["was left behind/wasn't left far behind"]], + "packed_predicates": ["was going fast/wasn't going fast", "was going slow/wasn't going slow"], + }, + ## didn't defeated, replace error: didn't defeat -> defeated + { + "index": 12, + "orig_sentence": "Frank felt vindicated/crushed when his longtime rival Bill revealed that [he] was the winner of the competition.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Michael"], ["Mary", "Linda"]], + "packed_relations": ["beat/didn't beat", "lost to/didn't lose to"], + "packed_relation_substitutes": [["defeated/didn't defeat"], ["was defeated by/wasn't defeated by"]], + "relation_suffix": "in the game", + "packed_predicates": ["was happy/wasn't happy", "was sad/wasn't sad"], + "reverse_causal": True + }, + { + "index": 16, + "orig_sentence": "The large ball crashed right through the table because [it] was made of steel/styrofoam.", + "entities": ["ball", "board"], + "entity_substitutes": [["bullet", "arrow"], ["shield", "disk"]], + "determiner": "the", + "packed_relations": ["crashed right through/didn't crash through", "failed to block/blocked"], + "packed_relation_substitutes": [["penetrated through/didn't penetrate through"], ["failed to stop/stopped"]], + "packed_predicates": ["was hard/wasn't hard", "was soft/wasn't soft"], + }, + { + "index": 18, + "orig_sentence": "John couldn't see the stage with Billy in front of him because [he] is so short.", + "entities": ["John", "Susan"], + "entity_substitutes": [["David", "Edward"], ["Betty", "Donna"]], + "packed_relations": ["couldn't see the stage behind/could see the stage behind", "blocked the view of/didn't block the view of"], + "packed_relation_substitutes": [["couldn't catch sight of the stage behind/could catch sight of the stage behind"], ["obstructed the sight of/didn't obstruct the sight of"]], + "packed_predicates": ["is short/isn't short", "is tall/isn't tall"], + }, + { + "index": 20, + "orig_sentence": "Tom threw his schoolbag down to Ray after [he] reached the top of the stairs.", + "entities": ["Brian", "Amy"], + "entity_substitutes": [["Charles", "Paul"], ["Emma", "Linda"]], + "packed_relations": ["threw the schoolbag down to/threw the schoolbag up to", "caught the schoolbag thrown down by/caught the schoolbag thrown up by"], + "packed_relation_substitutes": [["cast the schoolbag down to/cast the schoolbag up to"], ["took the schoolbag thrown down by/took the schoolbag thrown up by"]], + "packed_predicates": ["reached the top of the stairs", "reached the bottom of the stairs"], + "conjunctions": [["after", ], ["before", ]] + }, + ## didn't defeated, replace error: 
didn't defeat -> defeated + { + "index": 22, + "orig_sentence": "Although they ran at about the same speed, Sue beat Sally because [she] had such a good start.", + "entities": ["Tom", "Sue"], + "entity_substitutes": [["John", "David"], ["Sally", "Susan"]], + "packed_relations": ["beat/didn't beat", "lost to/didn't lose to"], + "packed_relation_substitutes": [["defeated/didn't defeat"], ["was defeated by/wasn't defeated by"]], + "relation_prefix": "Running at about the same speed,", + "relation_suffix": "in the running race", + "packed_predicates": ["had a good start/didn't have a good start", "had a bad start/didn't have a bad start"], + }, + { + "index": 28, + "orig_sentence": "Anna did a lot better than her good friend Lucy on the test because [she] had studied so hard.", + "entities": ["Anna", "Andy"], + "entity_substitutes": [["Lucy", "Nancy"], ["George", "Frank"]], + "packed_relations": ["did better than/didn't do better than", "did worse than/didn't do worse than"], + "packed_relation_substitutes": [["performed better than/didn't perform better than"], ["performed worse than/didn't perform worse than"]], + "relation_suffix": "on the test", + "packed_predicates": ["had studied hard/hadn't studied hard", "was lazy in doing homework/wasn't lazy in doing homework"], + }, + { + "index": 30, + "orig_sentence": "The firemen arrived after the police because [they] were coming from so far away.", + "entities": ["doctor", "police"], + "entity_substitutes": [["worker", "employee"], ["boss", "administrator"]], + "determiner": "the", + "packed_relations": ["arrived after/didn't arrive after", "arrived before/didn't arrive before"], + "packed_relation_substitutes": [["reached here after/didn't reach here after"], ["reached here before/didn't reach here before"]], + "packed_predicates": ["came from far away/didn't come from far away", "came from a close place/didn't come from a close place"], + }, + { + "index": 32000, + "orig_sentence": "Frank was upset with Tom because the toaster [he] had bought from him didn't work.", + "entities": ["Betty", "Henry"], + "entity_substitutes": [["Amy", "Linda"], ["Bush", "Frank"]], + "packed_relations": ["was upset with/was pleased with", "was hated by/was loved by"], + "packed_relation_substitutes": [["hated/liked"], ["was disliked by/was liked by"]], + "packed_predicates": ["had bought didn't work/had bought worked well", "had sold didn't work/had sold worked well"], + "predicate_prefix": "the toaster", + "predicate_dichotomy": False, + }, + { + "index": 36, + "orig_sentence": "The sack of potatoes had been placed above the bag of flour, so [it] had to be moved first", + "entities": ["potatoes", "flour"], + "entity_substitutes": [["candy", "rice"], ["beans", "noodles"]], + "determiner": "the bag of", + "packed_relations": ["had been placed above/hadn't been placed above", "had been placed below/hadn't been placed below"], + "packed_relation_substitutes": [["had been put above/hadn't been put above"], ["had been put below/hadn't been put below"]], + "packed_predicates": ["had to be moved first/couldn't be moved first", "had to be moved later/couldn't be moved later"], + "reverse_causal": True + }, + { + "index": 38, + "orig_sentence": "Pete envies Martin although [he] is very successful.", + "entities": ["Peter", "Mandy"], + "entity_substitutes": [["Martin", "Paul"], ["Cindy", "Emma"]], + "packed_relations": ["envied/didn't envy", "was envied by/wasn't envied by"], + "packed_relation_substitutes": [["was jealous of/wasn't jealous of"], ["was admired by/wasn't 
admired by"]], + "packed_predicates": ["failed/didn't fail", "was successful/wasn't successful"], + }, + { + "index": 42, + "orig_sentence": "I poured water from the bottle into the cup until [it] was empty.", + "entities": ["bottle", "cup"], + "entity_substitutes": [["bowl", "bucket"], ["tube", "container"]], + "determiner": "the", + "packed_relations": ["was filled with water from/leaked into", "leaked into/was filled with water from"], + "packed_relation_substitutes": [["was suffused with water from/dripped water into"], ["dripped water into/was suffused with water from"]], + "packed_predicates": ["was empty", "was full"], + "conjunctions": [["after", ], ["before", ]] + }, + { + "index": 50, + "orig_sentence": "Joe's uncle can still beat him at tennis, even though [he] is 30 years younger.", + "entities": ["Joe", "Amy"], + "entity_substitutes": [["David", "Charles"], ["Betty", "Cindy"]], + "packed_relations": ["can beat/can't beat", "often loses to/seldom loses to"], + "packed_relation_substitutes": [["can defeat/can't defeat"], ["is often defeated by/is seldom defeated by"]], + "relation_suffix": "at tennis", + "packed_predicates": ["is older/isn't older", "is younger/isn't younger"], + }, + { + "index": 68, + "orig_sentence": "Ann asked Mary what time the library closes, because [she] had forgotten.", + "entities": ["Ann", "Henry"], + "entity_substitutes": [["Mary", "Linda"], ["Brian", "Michael"]], + "packed_relations": ["asked/didn't ask", "told/didn't tell"], + "packed_relation_substitutes": [["was told by/wasn't told by"], ["was asked by/wasn't asked by"]], + "relation_suffix": "what time the library closes", + "packed_predicates": ["had forgotten/hadn't forgotten", "remembered/didn't remember"], + }, + { + "index": 84, + "orig_sentence": "If the con artist has succeeded in fooling Sam, [he] would have gotten a lot of money.", + "entities": ["Sam", "Emma"], + "entity_substitutes": [["Paul", "Bush"], ["Susan", "Lucy"]], + "packed_relations": ["succeeded in fooling/failed to fool", "was fooled by/wasn't fooled by"], + "packed_relation_substitutes": [["succeeded in cheating/failed to cheat"], ["was cheated by/wasn't cheated by"]], + "packed_predicates": ["got a lot of money/didn't get a lot of money", "lost a lot of money/didn't lose a lot of money"], + "predicate_dichotomy": False, + "reverse_causal": True + }, + { + "index": 15000, + "orig_sentence": "Jackson was greatly influenced by Arnold, though [he] lived two centuries later.", + "entities": ["Jack", "Betty"], + "entity_substitutes": [["Tom", "Jay"], ["Emily", "Helen"]], + "packed_relations": ["always takes care of/doesn't take care of", "is always taken care of by/isn't taken care of by"], + "packed_relation_substitutes": [["always looks after/doesn't look after"], ["always needs the help of/didn't need the help of"]], + "packed_predicates": ["is older/isn't older", "is younger/isn't younger"], + }, + { + "index": 160, + "orig_sentence": "The actress used to be named Terpsichore, but she changed it to Tina a few years ago, because she figured [it] was too hard to pronounce.", + "entities": ["Betty", "Adele"], + "entity_substitutes": [["Amy", "Cindy"], ["Alberta", "Caroline"]], + "packed_relations": ["replaced/didn't replace", "was changed to/wasn't changed to"], + "packed_relation_substitutes": [["was substituted for/wasn't substituted for"], ["was replaced by/wasn't replaced by"]], + "relation_suffix": "as the actress's new name", + "packed_predicates": ["is easy to pronounce/isn't easy to pronounce", "is hard to 
pronounce/isn't hard to pronounce"], + }, + { + "index": 1700000, + "orig_sentence": "In July, Kamtchatka declared war on Yakutsk. Since Yakutsk's army was much better equipped and ten times larger, [they] were defeated within weeks.", + "entities": ["Germany", "Italy"], + "entity_substitutes": [["Australia", "Japan"], ["Argentina", "Canada"]], + "packed_relations": ["defeated/didn't defeat", "was defeated by/wasn't defeated by"], + "packed_relation_substitutes": [["conquered/didn't conquer"], ["was conquered by/wasn't conquered by"]], + "packed_predicates": ["was more powerful/wasn't more powerful", "was less powerful/wasn't less powerful"], + }, + { + "index": 186, + "orig_sentence": "When the sponsors of the bill got to the town hall, they were surprised to find that the room was full of opponents. [They] were very much in the minority", + "entities": ["sponsors", "opponents"], + "entity_substitutes": [["workers", "customers"], ["teachers", "students"]], + "determiner": "the", + "packed_relations": ["were less in number than/were not less in number than", "were more in number than/were not more in number than"], + "packed_relation_substitutes": [["were outnumbered by/were not outnumbered by"], ["outnumbered/didn't outnumber"]], + "packed_predicates": ["were in the minority/were not in the minority", "were in the majority/were not in the majority"], + "reverse_causal": True + }, + { + "index": 188, + "orig_sentence": "Everyone really loved the oatmeal cookies; only a few people liked the chocolate chip cookies. Next time, we should make more of [them] .", + "entities": ["cookies", "chips"], + "entity_substitutes": [["apples", "bananas"], ["grapes", "sandwiches"]], + "determiner": "the", + "packed_relations": ["are more popular than/are less popular than", "lose to/don't lose to"], + "packed_relation_substitutes": [["are sold more than/are sold less than"], ["are not as popular as/are as popular as"]], + "packed_predicates": ["should be made more next time/shouldn't be made more next time", "should be made less next time/shouldn't be made less next time"], + "reverse_causal": True + }, + { + "index": 190, + "orig_sentence": "We had hoped to place copies of our newsletter on all the chairs in the auditorium, but there were simply not enough of [them] .", + "entities": ["newspapers", "chairs"], + "entity_substitutes": [["cups", "pictures"], ["tables", "benches"]], + "determiner": "the", + "packed_relations": ["could be placed on all/couldn't be placed on all", "could all be covered by/couldn't all be covered by"], + "packed_relation_substitutes": [["could be put on all/couldn't be put on all"], ["could carry all/couldn't carry all"]], + "packed_predicates": ["there were many of/there were not many of", "there were few of/there were not few of"], + "prepositive_pred": True, + }, + { + "index": 19600, + "orig_sentence": "Steve follows Fred's example in everything. 
[He] admires him hugely.", + "entities": ["Steve", "Lucy"], + "entity_substitutes": [["Fred", "George"], ["Lily", "Wendy"]], + "packed_relations": ["follows/doesn't follow", "is followed by/isn't followed by"], + "packed_relation_substitutes": [["imitates/doesn't imitate"], ["is imitated by/isn't imitated by"]], + "relation_suffix": "in everything", + "packed_predicates": ["is bad at making decisions/isn't bad at making decisions", "is good at making decisions/isn't good at making decisions"], + }, + { + "index": 198, + "orig_sentence": "The table won't fit through the doorway because [it] is too wide.", + "entities": ["table", "doorway"], + "entity_substitutes": [["desk", "sofa"], ["corridor", "hallway"]], + "determiner": "the", + "packed_relations": ["will fit through/won't fit through", "will be fitted through by/won't be fitted through by"], + "packed_relation_substitutes": [["will pass through/won't pass through"], ["will be passed through by/won't be passed through by"]], + "packed_predicates": ["is narrow/isn't narrow", "is wide/isn't wide"], + }, + { + "index": 2000000, + "orig_sentence": "Grace was happy to trade me her sweater for my jacket. She thinks [it] looks dowdy on her.", + "entities": ["sweater", "jacket"], + "entity_substitutes": [["skirt", "cap"], ["hat", "short"]], + "determiner": "the", + "packed_relations": ["is traded by Grace for/isn't traded by Grace for", "is substituted by Grace for/isn't substituted by Grace for"], + "packed_relation_substitutes": [["is replaced by Grace with/isn't replaced by Grace with"], ["is preferred by Grace to/isn't preferred by Grace to"]], + "packed_predicates": ["looks bad/looks not bad", "looks good/looks not good"], + "predicate_prefix": "she thinks", + }, + { + "index": 226, + "orig_sentence": "Bill passed the half-empty plate to John because [he] was full.", + "entities": ["Bill", "Amy"], + "entity_substitutes": [["Brian", "David"], ["Emma", "Helen"]], + "packed_relations": ["passed the half-empty plate to/didn't pass the half-empty plate to", "received the half-empty plate from/didn't receive the half-empty plate from"], + "packed_relation_substitutes": [["gave the half-empty plate to/didn't give the half-empty plate to"], ["took the half-empty plate from/didn't take the half-empty plate from"]], + "packed_predicates": ["was full/wasn't full", "was hungry/wasn't hungry"], + }, + { + "index": 252, + "orig_sentence": "George got free tickets to the play, but he gave them to Eric, even though [he] was particularly eager to see it.", + "entities": ["George", "Linda"], + "entity_substitutes": [["Eric", "Ted"], ["Cindy", "Lucy"]], + "packed_relations": ["gave the tickets of the play to/didn't give the tickets of the play to", "received the tickets of the play from/didn't receive the tickets of the play from"], + "packed_relation_substitutes": [["sent the tickets of the play to/didn't send the tickets of the play to"], ["took the tickets of the play from/didn't take the tickets of the play from"]], + "packed_predicates": ["wasn't interested in it/was interested in it", "was eager to see it/wasn't eager to see it"], + }, + { + "index": 261, + "orig_sentence": "Kirilov ceded the presidency to Shatov because [he] was less popular.", + "entities": ["James", "Amy"], + "entity_substitutes": [["Robert", "Jack"], ["Donna", "Emily"]], + "packed_relations": ["ceded the presidency to/didn't cede the presidency to", "took over the presidency from/didn't take over the presidency from"], + "packed_relation_substitutes": [["gave the presidency 
to/didn't give the presidency to"], ["got the presidency from/didn't get the presidency from"]], + "packed_predicates": ["was notorious/was not notorious", "was popular/wasn't popular"], + }, +] diff --git a/examples/child_generator.py b/examples/child_generator.py new file mode 100644 index 00000000000000..5d71d8be387042 --- /dev/null +++ b/examples/child_generator.py @@ -0,0 +1,128 @@ +import os +import json +import itertools +from itertools import product, chain + +from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig +from child_frames import frames + + +CONFIG_NAME = 'bert_config.json' +BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/' +tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt')) + + +A_template = "{rel_prefix} {dt} {ent0} {rel} {dt} {ent1} {rel_suffix}" +B_templates = ["{pred_prefix} {dt} {ent} {pred}", "{pred_prefix} {pred} {dt} {ent}"] + +# causal_templates = [["{A} because {B}."],# "{B} so {A}."], +# ["{A} so {B}."],# "{B} because {A}."] +# ] +# turning_templates = [["{A} although {B}."],# "{B} but {A}."], +# ["{A} but {B}."],# "{B} although {A}."] +# ] + +causal_templates = [["{A} ||| {conj} {B}."],# "{B} so {A}."], + ["{A} ||| {conj} {B}."],# "{B} because {A}."] + ] +turning_templates = [["{A} ||| {conj} {B}."],# "{B} but {A}."], + ["{A} ||| {conj} {B}."],# "{B} although {A}."] + ] + + +def reverse(l): + return list(reversed(l)) + + +def mask(ent_str): + tokens = ent_str.strip().split() + if len(tokens) == 1: + return '[%s]' % tokens[0] + elif len(tokens) == 2: + assert tokens[0] == 'the', ent_str + return '%s [%s]' % (tokens[0], tokens[1]) + else: + assert False, ent_str + + +def make_sentences(index=-1, orig_sentence='', entities=["John", "Mary"], entity_substitutes=None, determiner="", + packed_relations=["rel/~rel", "rev_rel/~rev_rel"], packed_relation_substitutes=None, + relation_prefix="", relation_suffix="", + packed_predicates=["pred0/~pred0", "pred1/~pred1"], predicate_prefix="", prepositive_pred=False, + predicate_dichotomy=True, reverse_causal=False, conjunctions=[["because", "so"], ["although", "but"]]): + assert entities[0].lower() in tokenizer.vocab , entities[0] + assert entities[1].lower() in tokenizer.vocab , entities[1] + + def form_As(packed_rels): + relations, neg_relations = zip(*[rel.split("/") for rel in packed_rels]) + relations, neg_relations = list(relations), list(neg_relations) + + As = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) + for ent0, ent1, rel in [entities + relations[:1], reverse(entities) + reverse(relations)[:1]]] + negAs = [A_template.format(dt=determiner, ent0=ent0, ent1=ent1, rel=rel, rel_prefix=relation_prefix, rel_suffix=relation_suffix) + for ent0, ent1, rel in [entities + neg_relations[:1], reverse(entities) + reverse(neg_relations)[:1]]] + return As, negAs + + As, negAs = form_As(packed_relations) + + substituted_As, substituted_negAs = [], [] + for packed_rel_subs in zip(*packed_relation_substitutes): + subs_As, subs_negAs = form_As(packed_rel_subs) + substituted_As += subs_As + substituted_negAs += subs_negAs + + if "/" in packed_predicates[0]: + predicates, neg_predicates = zip(*[pred.split("/") for pred in packed_predicates]) + predicates, neg_predicates = list(predicates), list(neg_predicates) + else: + predicates, neg_predicates = packed_predicates, [] + + B_template = B_templates[int(prepositive_pred)] + Bs = 
[B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, predicates)] + negBs = [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, neg_predicates)] + if predicate_dichotomy: + Bs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, reversed(neg_predicates))] + negBs += [B_template.format(dt=determiner, ent=mask(ent), pred=pred, pred_prefix=predicate_prefix) + for ent, pred in zip(entities, reversed(predicates))] + + def form_sentences(sentence_template, As, Bs, conj): + return [" ".join(sentence_template.format(A=A, B=B, conj=conj).split()) for A, B in product(As, Bs)] + + def form_all_sentences(As, negAs, Bs, negBs): + causal_sentences = [] + causal_conj = conjunctions[0][int(reverse_causal)] + for causal_template in causal_templates[int(reverse_causal)]: + for A, B in [(As, Bs), (negAs, negBs)]: + causal_sentences += form_sentences(causal_template, A, B, causal_conj) + + turning_sentences = [] + turning_conj = conjunctions[1][int(reverse_causal)] + for turning_template in turning_templates[int(reverse_causal)]: + for A, B in [(As, negBs), (negAs, Bs)]: + turning_sentences += form_sentences(turning_template, A, B, turning_conj) + + sentences = causal_sentences + turning_sentences + return sentences, causal_sentences, turning_sentences + + sentences, causal_sentences, turning_sentences = form_all_sentences(As, negAs, Bs, negBs) + # substituted_sentences = sentences + + if packed_relation_substitutes is not None: + substituted_sentences = form_all_sentences(substituted_As, substituted_negAs, Bs, negBs)[0] + + substituted_sent_groups = list(zip(sentences, substituted_sentences)) + + if entity_substitutes is not None: + for sub in entity_substitutes: + for ent in sub: + assert ent.lower() in tokenizer.vocab , ent + " not in BERT vocab" + assert len(set(chain.from_iterable(entity_substitutes))) == 4, entity_substitutes + assert len(set(chain.from_iterable(entity_substitutes)).union(set(entities))) == 6 + + entity_substitutes = list(itertools.product(entities[:1] + entity_substitutes[0], entities[1:] + entity_substitutes[1])) + substituted_sent_groups = [[sent.replace(entities[0], sub[0]).replace(entities[1], sub[1]) + for sent in sent_group for sub in entity_substitutes] for sent_group in substituted_sent_groups] + return causal_sentences, turning_sentences, substituted_sent_groups \ No newline at end of file diff --git a/examples/extract_features.py b/examples/extract_features.py index abe7fdffe7dbec..9d05d7905d0fa4 100644 --- a/examples/extract_features.py +++ b/examples/extract_features.py @@ -168,7 +168,7 @@ def read_examples(input_file): """Read a list of `InputExample`s from an input file.""" examples = [] unique_id = 0 - with open(input_file, "r") as reader: + with open(input_file, "r", encoding='utf-8') as reader: while True: line = reader.readline() if not line: @@ -199,6 +199,7 @@ def main(): "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.") ## Other parameters + parser.add_argument("--do_lower_case", action='store_true', help="Set this flag if you are using an uncased model.") parser.add_argument("--layers", default="-1,-2,-3,-4", type=str) parser.add_argument("--max_seq_length", default=128, type=int, help="The maximum total input sequence length after WordPiece tokenization. 
Sequences longer " @@ -209,7 +210,6 @@ def main(): default=-1, help = "local_rank for distributed training on gpus") parser.add_argument("--no_cuda", - default=False, action='store_true', help="Whether not to use CUDA when available") @@ -227,7 +227,7 @@ def main(): layer_indexes = [int(x) for x in args.layers.split(",")] - tokenizer = BertTokenizer.from_pretrained(args.bert_model) + tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case) examples = read_examples(args.input_file) diff --git a/examples/run_child_finetuning.py b/examples/run_child_finetuning.py new file mode 100644 index 00000000000000..e960126b85d415 --- /dev/null +++ b/examples/run_child_finetuning.py @@ -0,0 +1,531 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import sys +import logging +import argparse +from tqdm import tqdm, trange +import itertools + +import numpy as np +import torch +from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler +from torch.utils.data.distributed import DistributedSampler + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from pytorch_pretrained_bert.modeling import BertForPreTraining, BertForMaskedLM +from pytorch_pretrained_bert.optimization import BertAdam + +from child_generator import make_sentences +from child_frames import frames + +from torch.utils.data import Dataset +import random + +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt='%m/%d/%Y %H:%M:%S', + level=logging.INFO) +logger = logging.getLogger(__name__) + + +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x + + +def rejoin_tokens(tokens): + new_tokens = [] + while len(tokens) > 0: + t = tokens.pop(0) + if t == "[": + t1 = tokens.pop(0) + t2 = tokens.pop(0) + assert t2 == "]", t2 + new_tokens.append(t + t1 + t2) + else: + new_tokens.append(t) + return new_tokens + + +class CHILDDataset(Dataset): + def __init__(self, tokenizer, one_sent=False, max_seq_len=None, dev_percent=-1): + self.tokenizer = tokenizer + self.one_sent = one_sent + self.max_seq_len = max_seq_len + + if dev_percent == -1: + causal_lines, turning_lines, subs_lines = [], [], [] + for frame in frames: + causal_sent, turning_sent, subs_sent = make_sentences(**frame) + causal_lines += causal_sent + turning_lines += turning_sent + subs_lines += subs_sent + train_lines = causal_lines + turning_lines + dev_lines = list(set(subs_lines) - set(train_lines)) + self.all_lines = train_lines + dev_lines + self.n_dev = len(dev_lines) + else: + self.all_lines = list(itertools.chain.from_iterable( + [make_sentences(**frame)[-1] for frame in frames])) + random.shuffle(self.all_lines) + self.n_dev = int(round(len(self.all_lines) * dev_percent)) + + n_all = len(self.all_lines) + self.n_train = n_all - self.n_dev + + if type(self.all_lines[0]) == list: + n_substitutes = len(self.all_lines[0]) + assert all(len(substitutes) == n_substitutes for substitutes in self.all_lines) + print('flattening all_lines: %d * %d = %d' % + (n_all, n_substitutes, n_all * n_substitutes)) + self.all_lines = list(itertools.chain.from_iterable(self.all_lines)) + self.n_dev *= n_substitutes + self.n_train *= n_substitutes + + self.examples = [] + cur_id = 0 + for line in self.all_lines: + t1, t2, is_next_label = self.split_sent(line) + + tokens_a = rejoin_tokens(self.tokenizer.tokenize(t1)) + tokens_b = rejoin_tokens(self.tokenizer.tokenize(t2)) if t2 is 
not None else None + + example = InputExample(guid=cur_id, tokens_a=tokens_a, tokens_b=tokens_b, is_next=is_next_label) + self.examples.append(example) + cur_id += 1 + + if self.max_seq_len is None: + self.max_seq_len = max([len(example.tokens_a) + len(example.tokens_b) + 3 + if example.tokens_b is not None else len(example.tokens_a) + 2 + for example in self.examples]) + print('max_seq_len =', self.max_seq_len) + + self.features = [convert_example_to_features(example, self.max_seq_len, self.tokenizer) for example in self.examples] + + def split_sent(self, line): + label = 0 + if "|||" in line: + t1, t2 = [t.strip() for t in line.split("|||")] + assert len(t1) > 0 and len(t2) > 0, "%d %d" % (len(t1), len(t2)) + if self.one_sent: + t1 = t1 + " " + t2 + t2 = None + else: + assert self.one_sent + t1, t2 = line.strip(), None + return t1, t2, label + + def get_train_examples(self): + return self.examples[:self.n_train] + + def get_dev_examples(self): + return self.examples[self.n_train:] + + def get_train_features(self): + return self.features[:self.n_train] + + def get_dev_features(self): + return self.features[self.n_train:] + + def __len__(self): + return len(self.all_lines) + + +class InputExample(object): + def __init__(self, guid, tokens_a, tokens_b=None, is_next=None, lm_labels=None): + self.guid = guid + self.tokens_a = tokens_a + self.tokens_b = tokens_b + self.is_next = is_next # nextSentence + self.lm_labels = lm_labels # masked words for language model + + +class InputFeatures(object): + def __init__(self, input_ids, input_mask, segment_ids, is_next, lm_label_ids): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.is_next = is_next + self.lm_label_ids = lm_label_ids + + +def mask_word(tokens, tokenizer): + output_label = [] + + for i, token in enumerate(tokens): + if token.startswith("[") and token.endswith("]"): # masked word + token = token[1:-1] + tokens[i] = "[MASK]" + output_label.append(tokenizer.vocab[token]) + else: + output_label.append(-1) + + return tokens, output_label + + +def convert_example_to_features(example, max_seq_length, tokenizer): + tokens_a = example.tokens_a + tokens_b = example.tokens_b + + t1_masked, t1_label = mask_word(tokens_a, tokenizer) + lm_label_ids = [-1] + t1_label + [-1] + + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + if tokens_b is not None and len(tokens_b) > 0: + t2_masked, t2_label = mask_word(tokens_b, tokenizer) + lm_label_ids += (t2_label + [-1]) + + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
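+ # Padded positions get input_mask 0 (not attended to) and lm_label_id -1 so that, like the unmasked tokens above, they should be ignored by the masked-LM loss.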
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + lm_label_ids.append(-1) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + assert len(lm_label_ids) == max_seq_length + + if example.guid < -5: + logger.info("*** Example ***") + logger.info("guid: %s" % (example.guid)) + logger.info("tokens: %s" % " ".join( + [str(x) for x in tokens])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + logger.info("LM label: %s " % (lm_label_ids)) + logger.info("Is next sentence label: %s " % (example.is_next)) + + features = InputFeatures(input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + lm_label_ids=lm_label_ids, + is_next=example.is_next) + return features + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--dev_percent", + default=-1, + type=float, + help="") + parser.add_argument("--one_sent", + action='store_true', + help="") + + ## Required parameters + parser.add_argument("--output_dir", + default=None, + type=str, + required=True, + help="The output directory where the model checkpoints will be written.") + + ## Other parameters + parser.add_argument("--max_seq_length", + default=128, + type=int, + help="The maximum total input sequence length after WordPiece tokenization. \n" + "Sequences longer than this will be truncated, and sequences shorter \n" + "than this will be padded.") + parser.add_argument("--do_train", + action='store_true', + help="Whether to run training.") + parser.add_argument("--do_eval", + action='store_true', + help="Whether to run eval on the dev set.") + parser.add_argument("--train_batch_size", + default=32, + type=int, + help="Total batch size for training.") + parser.add_argument("--eval_batch_size", + default=32, + type=int, + help="Total batch size for eval.") + parser.add_argument("--learning_rate", + default=3e-5, + type=float, + help="The initial learning rate for Adam.") + parser.add_argument("--num_train_epochs", + default=3.0, + type=float, + help="Total number of training epochs to perform.") + parser.add_argument("--warmup_proportion", + default=0.1, + type=float, + help="Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10%% of training.") + parser.add_argument("--no_cuda", + action='store_true', + help="Whether not to use CUDA when available") + parser.add_argument("--do_lower_case", + action='store_true', + help="Whether to lower case the input text. True for uncased models, False for cased models.") + parser.add_argument("--local_rank", + type=int, + default=-1, + help="local_rank for distributed training on gpus") + parser.add_argument('--seed', + type=int, + default=42, + help="random seed for initialization") + parser.add_argument('--gradient_accumulation_steps', + type=int, + default=1, + help="Number of updates steps to accumualte before performing a backward/update pass.") + parser.add_argument('--fp16', + action='store_true', + help="Whether to use 16-bit float precision instead of 32-bit") + parser.add_argument('--loss_scale', + type = float, default = 0, + help = "Loss scaling to improve fp16 numeric stability. 
Only used when fp16 set to True.\n" + "0 (default value): dynamic loss scaling.\n" + "Positive power of 2: static loss scaling value.\n") + + args = parser.parse_args() + + if args.local_rank == -1 or args.no_cuda: + device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") + n_gpu = torch.cuda.device_count() + else: + torch.cuda.set_device(args.local_rank) + device = torch.device("cuda", args.local_rank) + n_gpu = 1 + # Initializes the distributed backend which will take care of sychronizing nodes/GPUs + torch.distributed.init_process_group(backend='nccl') + logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format( + device, n_gpu, bool(args.local_rank != -1), args.fp16)) + + if args.gradient_accumulation_steps < 1: + raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format( + args.gradient_accumulation_steps)) + + args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + if n_gpu > 0: + torch.cuda.manual_seed_all(args.seed) + + if not args.do_train and not args.do_eval: + raise ValueError("At least one of `do_train` or `do_eval` must be True.") + + if os.path.exists(args.output_dir) and os.listdir(args.output_dir): + raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir)) + os.makedirs(args.output_dir, exist_ok=True) + + BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/' + tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'), do_lower_case=args.do_lower_case) + + #train_examples = None + num_train_steps = None + if args.do_train: + print("Generating CHILD Dataset") + child_dataset = CHILDDataset(tokenizer, one_sent=args.one_sent, dev_percent=args.dev_percent) + train_features = child_dataset.get_train_features() + num_train_steps = int( + len(train_features) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) + + # Prepare model + model = BertForMaskedLM.from_pretrained(BERT_DIR) + if args.fp16: + model.half() + model.to(device) + if args.local_rank != -1: + try: + from apex.parallel import DistributedDataParallel as DDP + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + model = DDP(model) + elif n_gpu > 1: + model = torch.nn.DataParallel(model) + + # Prepare optimizer + param_optimizer = list(model.named_parameters()) + no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] + optimizer_grouped_parameters = [ + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} + ] + if args.fp16: + try: + from apex.optimizers import FP16_Optimizer + from apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + 
warmup=args.warmup_proportion, + t_total=num_train_steps) + + def validate(model, eval_dataloader): + model.eval() + eval_loss, eval_accuracy = 0, 0 + nb_eval_steps, nb_eval_examples = 0, 0 + + # for batch in tqdm(eval_dataloader, desc="Evaluating"): + for batch in eval_dataloader: + batch = tuple(t.to(device) for t in batch) + input_ids, input_mask, segment_ids, lm_label_ids, is_next = batch + with torch.no_grad(): + tmp_eval_loss = model(input_ids, segment_ids, input_mask, lm_label_ids) + logits = model(input_ids, segment_ids, input_mask) + + logits = logits.detach().cpu().numpy() + lm_label_ids = lm_label_ids.to('cpu').numpy() + tmp_eval_accuracy = accuracy(logits, lm_label_ids) + + eval_loss += tmp_eval_loss.mean().item() + eval_accuracy += tmp_eval_accuracy + + nb_eval_examples += input_ids.size(0) + nb_eval_steps += 1 + + eval_loss = eval_loss / nb_eval_steps + eval_accuracy = eval_accuracy / nb_eval_examples + result = {'loss': eval_loss, + 'acc': eval_accuracy} + + # logger.info("***** Eval results *****") + for key in sorted(result.keys()): + # logger.info(" %s = %s", key, str(result[key])) + print(" %s = %.3f" % (key, result[key]), end='') + + + global_step = 0 + if args.do_train: + logger.info("***** Running training *****") + logger.info(" Num examples = %d", len(train_features)) + logger.info(" Batch size = %d", args.train_batch_size) + logger.info(" Num steps = %d", num_train_steps) + + all_input_ids = torch.tensor([f.input_ids for f in train_features], dtype=torch.long) + all_input_mask = torch.tensor([f.input_mask for f in train_features], dtype=torch.long) + all_segment_ids = torch.tensor([f.segment_ids for f in train_features], dtype=torch.long) + all_lm_label_ids = torch.tensor([f.lm_label_ids for f in train_features], dtype=torch.long) + all_is_next = torch.tensor([f.is_next for f in train_features], dtype=torch.long) + train_dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_lm_label_ids, all_is_next) + + if args.local_rank == -1: + train_sampler = RandomSampler(train_dataset) + else: + #TODO: check if this works with current data generator from disk that relies on file.__next__ + # (it doesn't return item back by index) + train_sampler = DistributedSampler(train_dataset) + train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size) + + if args.do_eval: + eval_features = child_dataset.get_dev_features() + all_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long) + all_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long) + all_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long) + all_lm_label_ids = torch.tensor([f.lm_label_ids for f in eval_features], dtype=torch.long) + all_is_next = torch.tensor([f.is_next for f in eval_features], dtype=torch.long) + eval_dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_lm_label_ids, all_is_next) + + eval_sampler = SequentialSampler(eval_dataset) + eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size) + + # logger.info("Epoch 0. Evaluating on train set...") + print("Epoch 0. 
Train:", end='') + validate(model, train_dataloader) + # logger.info("Evaluating on valid set...") + print(" Valid:", end='') + validate(model, eval_dataloader) + print() + + # for epoch in trange(int(args.num_train_epochs), desc="Epoch"): + for epoch in range(int(args.num_train_epochs)): + model.train() + tr_loss = 0 + nb_tr_examples, nb_tr_steps = 0, 0 + # for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): + for step, batch in enumerate(train_dataloader): + batch = tuple(t.to(device) for t in batch) + input_ids, input_mask, segment_ids, lm_label_ids, is_next = batch + loss = model(input_ids, segment_ids, input_mask, lm_label_ids) + if n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu. + if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() + tr_loss += loss.item() + nb_tr_examples += input_ids.size(0) + nb_tr_steps += 1 + if (step + 1) % args.gradient_accumulation_steps == 0: + if args.fp16: + # modify learning rate with special warm up BERT uses + # if args.fp16 is False, BertAdam is used that handles this automatically + lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_steps, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() + global_step += 1 + + if args.do_eval: + # logger.info("Epoch %d. Evaluating on train set..." % (epoch + 1)) + print("Epoch %d. Train:" % (epoch + 1), end='') + validate(model, train_dataloader) + # logger.info("Evaluating on valid set...") + print(" Valid:", end='') + validate(model, eval_dataloader) + print() + + # Save a trained model + # logger.info("** ** * Saving fine - tuned model ** ** * ") + # model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + # output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + # if args.do_train: + # torch.save(model_to_save.state_dict(), output_model_file) + + +def accuracy(out, labels): + outputs = np.argmax(out, axis=-1) + return np.all((outputs == labels) | (labels == -1), axis=-1).sum() + + +if __name__ == "__main__": + main() diff --git a/examples/run_classifier.py b/examples/run_classifier.py index 2c83b4fe497fea..31877a541457f9 100644 --- a/examples/run_classifier.py +++ b/examples/run_classifier.py @@ -1,5 +1,6 @@ # coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -35,7 +36,7 @@ from pytorch_pretrained_bert.optimization import BertAdam from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE -logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', +logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', datefmt = '%m/%d/%Y %H:%M:%S', level = logging.INFO) logger = logging.getLogger(__name__) @@ -90,7 +91,7 @@ def get_labels(self): @classmethod def _read_tsv(cls, input_file, quotechar=None): """Reads a tab separated value file.""" - with open(input_file, "r") as f: + with open(input_file, "r", encoding='utf-8') as f: reader = csv.reader(f, delimiter="\t", quotechar=quotechar) lines = [] for line in reader: @@ -196,9 +197,7 @@ def _create_examples(self, lines, set_type): def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer): """Loads a data file into a list of `InputBatch`s.""" - label_map = {} - for (i, label) in enumerate(label_list): - label_map[label] = i + label_map = {label : i for i, label in enumerate(label_list)} features = [] for (ex_index, example) in enumerate(examples): @@ -207,8 +206,6 @@ def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer tokens_b = None if example.text_b: tokens_b = tokenizer.tokenize(example.text_b) - - if tokens_b: # Modifies `tokens_a` and `tokens_b` in place so that the total # length is less than the specified length. # Account for [CLS], [SEP], [SEP] with "- 3" @@ -216,7 +213,7 @@ def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer else: # Account for [CLS] and [SEP] with "- 2" if len(tokens_a) > max_seq_length - 2: - tokens_a = tokens_a[0:(max_seq_length - 2)] + tokens_a = tokens_a[:(max_seq_length - 2)] # The convention in BERT is: # (a) For sequence pairs: @@ -236,22 +233,12 @@ def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer # For classification tasks, the first vector (corresponding to [CLS]) is # used as as the "sentence vector". Note that this only makes sense because # the entire model is fine-tuned. - tokens = [] - segment_ids = [] - tokens.append("[CLS]") - segment_ids.append(0) - for token in tokens_a: - tokens.append(token) - segment_ids.append(0) - tokens.append("[SEP]") - segment_ids.append(0) + tokens = ["[CLS]"] + tokens_a + ["[SEP]"] + segment_ids = [0] * len(tokens) if tokens_b: - for token in tokens_b: - tokens.append(token) - segment_ids.append(1) - tokens.append("[SEP]") - segment_ids.append(1) + tokens += tokens_b + ["[SEP]"] + segment_ids += [1] * (len(tokens_b) + 1) input_ids = tokenizer.convert_tokens_to_ids(tokens) @@ -260,10 +247,10 @@ def convert_examples_to_features(examples, label_list, max_seq_length, tokenizer input_mask = [1] * len(input_ids) # Zero-pad up to the sequence length. - while len(input_ids) < max_seq_length: - input_ids.append(0) - input_mask.append(0) - segment_ids.append(0) + padding = [0] * (max_seq_length - len(input_ids)) + input_ids += padding + input_mask += padding + segment_ids += padding assert len(input_ids) == max_seq_length assert len(input_mask) == max_seq_length @@ -309,34 +296,10 @@ def accuracy(out, labels): outputs = np.argmax(out, axis=1) return np.sum(outputs == labels) -def copy_optimizer_params_to_model(named_params_model, named_params_optimizer): - """ Utility function for optimize_on_cpu and 16-bits training. 
- Copy the parameters optimized on CPU/RAM back to the model on GPU - """ - for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model): - if name_opti != name_model: - logger.error("name_opti != name_model: {} {}".format(name_opti, name_model)) - raise ValueError - param_model.data.copy_(param_opti.data) - -def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False): - """ Utility function for optimize_on_cpu and 16-bits training. - Copy the gradient of the GPU parameters to the CPU/RAMM copy of the model - """ - is_nan = False - for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model): - if name_opti != name_model: - logger.error("name_opti != name_model: {} {}".format(name_opti, name_model)) - raise ValueError - if param_model.grad is not None: - if test_nan and torch.isnan(param_model.grad).sum() > 0: - is_nan = True - if param_opti.grad is None: - param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size())) - param_opti.grad.data.copy_(param_model.grad.data) - else: - param_opti.grad = None - return is_nan +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x def main(): parser = argparse.ArgumentParser() @@ -349,7 +312,8 @@ def main(): help="The input data dir. Should contain the .tsv files (or other data files) for the task.") parser.add_argument("--bert_model", default=None, type=str, required=True, help="Bert pre-trained model selected in the list: bert-base-uncased, " - "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.") + "bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, " + "bert-base-multilingual-cased, bert-base-chinese.") parser.add_argument("--task_name", default=None, type=str, @@ -359,7 +323,7 @@ def main(): default=None, type=str, required=True, - help="The output directory where the model checkpoints will be written.") + help="The output directory where the model predictions and checkpoints will be written.") ## Other parameters parser.add_argument("--max_seq_length", @@ -369,13 +333,14 @@ def main(): "Sequences longer than this will be truncated, and sequences shorter \n" "than this will be padded.") parser.add_argument("--do_train", - default=False, action='store_true', help="Whether to run training.") parser.add_argument("--do_eval", - default=False, action='store_true', help="Whether to run eval on the dev set.") + parser.add_argument("--do_lower_case", + action='store_true', + help="Set this flag if you are using an uncased model.") parser.add_argument("--train_batch_size", default=32, type=int, @@ -398,32 +363,28 @@ def main(): help="Proportion of training to perform linear learning rate warmup for. 
" "E.g., 0.1 = 10%% of training.") parser.add_argument("--no_cuda", - default=False, action='store_true', help="Whether not to use CUDA when available") parser.add_argument("--local_rank", type=int, default=-1, help="local_rank for distributed training on gpus") - parser.add_argument('--seed', - type=int, + parser.add_argument('--seed', + type=int, default=42, help="random seed for initialization") parser.add_argument('--gradient_accumulation_steps', type=int, default=1, - help="Number of updates steps to accumualte before performing a backward/update pass.") - parser.add_argument('--optimize_on_cpu', - default=False, - action='store_true', - help="Whether to perform optimization and keep the optimizer averages on CPU") + help="Number of updates steps to accumulate before performing a backward/update pass.") parser.add_argument('--fp16', - default=False, action='store_true', help="Whether to use 16-bit float precision instead of 32-bit") parser.add_argument('--loss_scale', - type=float, default=128, - help='Loss scaling, positive power of 2 values can improve fp16 convergence.') + type=float, default=0, + help="Loss scaling to improve fp16 numeric stability. Only used when fp16 set to True.\n" + "0 (default value): dynamic loss scaling.\n" + "Positive power of 2: static loss scaling value.\n") args = parser.parse_args() @@ -433,18 +394,23 @@ def main(): "mrpc": MrpcProcessor, } + num_labels_task = { + "cola": 2, + "mnli": 3, + "mrpc": 2, + } + if args.local_rank == -1 or args.no_cuda: device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") n_gpu = torch.cuda.device_count() else: + torch.cuda.set_device(args.local_rank) device = torch.device("cuda", args.local_rank) n_gpu = 1 # Initializes the distributed backend which will take care of sychronizing nodes/GPUs torch.distributed.init_process_group(backend='nccl') - if args.fp16: - logger.info("16-bits training currently not supported in distributed training") - args.fp16 = False # (see https://github.com/pytorch/pytorch/pull/13496) - logger.info("device %s n_gpu %d distributed training %r", device, n_gpu, bool(args.local_rank != -1)) + logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format( + device, n_gpu, bool(args.local_rank != -1), args.fp16)) if args.gradient_accumulation_steps < 1: raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format( @@ -461,7 +427,7 @@ def main(): if not args.do_train and not args.do_eval: raise ValueError("At least one of `do_train` or `do_eval` must be True.") - if os.path.exists(args.output_dir) and os.listdir(args.output_dir): + if os.path.exists(args.output_dir) and os.listdir(args.output_dir) and args.do_train: raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir)) os.makedirs(args.output_dir, exist_ok=True) @@ -471,9 +437,10 @@ def main(): raise ValueError("Task not found: %s" % (task_name)) processor = processors[task_name]() + num_labels = num_labels_task[task_name] label_list = processor.get_labels() - tokenizer = BertTokenizer.from_pretrained(args.bert_model) + tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case) train_examples = None num_train_steps = None @@ -483,40 +450,57 @@ def main(): len(train_examples) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) # Prepare model - model = BertForSequenceClassification.from_pretrained(args.bert_model, - 
cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank)) + model = BertForSequenceClassification.from_pretrained(args.bert_model, + cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank), + num_labels = num_labels) if args.fp16: model.half() model.to(device) if args.local_rank != -1: - model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank], - output_device=args.local_rank) + try: + from apex.parallel import DistributedDataParallel as DDP + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + model = DDP(model) elif n_gpu > 1: model = torch.nn.DataParallel(model) # Prepare optimizer - if args.fp16: - param_optimizer = [(n, param.clone().detach().to('cpu').float().requires_grad_()) \ - for n, param in model.named_parameters()] - elif args.optimize_on_cpu: - param_optimizer = [(n, param.clone().detach().to('cpu').requires_grad_()) \ - for n, param in model.named_parameters()] - else: - param_optimizer = list(model.named_parameters()) - no_decay = ['bias', 'gamma', 'beta'] + param_optimizer = list(model.named_parameters()) + no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] optimizer_grouped_parameters = [ - {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay_rate': 0.01}, - {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay_rate': 0.0} + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} ] t_total = num_train_steps if args.local_rank != -1: t_total = t_total // torch.distributed.get_world_size() - optimizer = BertAdam(optimizer_grouped_parameters, - lr=args.learning_rate, - warmup=args.warmup_proportion, - t_total=t_total) + if args.fp16: + try: + from apex.optimizers import FP16_Optimizer + from apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=t_total) global_step = 0 + nb_tr_steps = 0 + tr_loss = 0 if args.do_train: train_features = convert_examples_to_features( train_examples, label_list, args.max_seq_length, tokenizer) @@ -542,39 +526,40 @@ def main(): for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): batch = tuple(t.to(device) for t in batch) input_ids, input_mask, segment_ids, label_ids = batch - loss, _ = model(input_ids, segment_ids, input_mask, label_ids) + loss = model(input_ids, segment_ids, input_mask, label_ids) if n_gpu > 1: loss = loss.mean() # mean() to average on multi-gpu. 
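# Illustrative sketch, not part of this patch: a minimal, self-contained example of how the
# `no_decay` grouping used in the optimizer setup above splits parameters, shown on a
# hypothetical toy module whose attribute names mirror BERT's (`dense`, `LayerNorm`).
import torch

class ToyBlock(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.dense = torch.nn.Linear(4, 4)
        self.LayerNorm = torch.nn.LayerNorm(4)

toy = ToyBlock()
param_optimizer = list(toy.named_parameters())
# parameter names: 'dense.weight', 'dense.bias', 'LayerNorm.weight', 'LayerNorm.bias'
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
    # plain weights (here 'dense.weight') receive L2 weight decay
    {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
    # biases and LayerNorm parameters are excluded from weight decay
    {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0},
]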
- if args.fp16 and args.loss_scale != 1.0: - # rescale loss for fp16 training - # see https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html - loss = loss * args.loss_scale if args.gradient_accumulation_steps > 1: loss = loss / args.gradient_accumulation_steps - loss.backward() + + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() + tr_loss += loss.item() nb_tr_examples += input_ids.size(0) nb_tr_steps += 1 if (step + 1) % args.gradient_accumulation_steps == 0: - if args.fp16 or args.optimize_on_cpu: - if args.fp16 and args.loss_scale != 1.0: - # scale down gradients for fp16 training - for param in model.parameters(): - if param.grad is not None: - param.grad.data = param.grad.data / args.loss_scale - is_nan = set_optimizer_params_grad(param_optimizer, model.named_parameters(), test_nan=True) - if is_nan: - logger.info("FP16 TRAINING: Nan in gradients, reducing loss scaling") - args.loss_scale = args.loss_scale / 2 - model.zero_grad() - continue - optimizer.step() - copy_optimizer_params_to_model(model.named_parameters(), param_optimizer) - else: - optimizer.step() - model.zero_grad() + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() global_step += 1 + # Save a trained model + model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + if args.do_train: + torch.save(model_to_save.state_dict(), output_model_file) + + # Load a trained model that you have fine-tuned + model_state_dict = torch.load(output_model_file) + model = BertForSequenceClassification.from_pretrained(args.bert_model, state_dict=model_state_dict, num_labels=num_labels) + model.to(device) + if args.do_eval and (args.local_rank == -1 or torch.distributed.get_rank() == 0): eval_examples = processor.get_dev_examples(args.data_dir) eval_features = convert_examples_to_features( @@ -594,14 +579,16 @@ def main(): model.eval() eval_loss, eval_accuracy = 0, 0 nb_eval_steps, nb_eval_examples = 0, 0 - for input_ids, input_mask, segment_ids, label_ids in eval_dataloader: + + for input_ids, input_mask, segment_ids, label_ids in tqdm(eval_dataloader, desc="Evaluating"): input_ids = input_ids.to(device) input_mask = input_mask.to(device) segment_ids = segment_ids.to(device) label_ids = label_ids.to(device) with torch.no_grad(): - tmp_eval_loss, logits = model(input_ids, segment_ids, input_mask, label_ids) + tmp_eval_loss = model(input_ids, segment_ids, input_mask, label_ids) + logits = model(input_ids, segment_ids, input_mask) logits = logits.detach().cpu().numpy() label_ids = label_ids.to('cpu').numpy() @@ -615,11 +602,11 @@ def main(): eval_loss = eval_loss / nb_eval_steps eval_accuracy = eval_accuracy / nb_eval_examples - + loss = tr_loss/nb_tr_steps if args.do_train else None result = {'eval_loss': eval_loss, 'eval_accuracy': eval_accuracy, 'global_step': global_step, - 'loss': tr_loss/nb_tr_steps} + 'loss': loss} output_eval_file = os.path.join(args.output_dir, "eval_results.txt") with open(output_eval_file, "w") as writer: diff --git a/examples/run_lm_finetuning.py b/examples/run_lm_finetuning.py new file mode 100644 index 00000000000000..35a2f797c7e3de --- /dev/null +++ b/examples/run_lm_finetuning.py @@ -0,0 +1,649 @@ +# coding=utf-8 +# Copyright 2018 The 
Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""BERT finetuning runner.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import logging +import argparse +from tqdm import tqdm, trange + +import numpy as np +import torch +from torch.utils.data import DataLoader, RandomSampler +from torch.utils.data.distributed import DistributedSampler + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from pytorch_pretrained_bert.modeling import BertForPreTraining +from pytorch_pretrained_bert.optimization import BertAdam + +from torch.utils.data import Dataset +import random + +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt='%m/%d/%Y %H:%M:%S', + level=logging.INFO) +logger = logging.getLogger(__name__) + + +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x + + +class BERTDataset(Dataset): + def __init__(self, corpus_path, tokenizer, seq_len, encoding="utf-8", corpus_lines=None, on_memory=True): + self.vocab = tokenizer.vocab + self.tokenizer = tokenizer + self.seq_len = seq_len + self.on_memory = on_memory + self.corpus_lines = corpus_lines # number of non-empty lines in input corpus + self.corpus_path = corpus_path + self.encoding = encoding + self.current_doc = 0 # to avoid random sentence from same doc + + # for loading samples directly from file + self.sample_counter = 0 # used to keep track of full epochs on file + self.line_buffer = None # keep second sentence of a pair in memory and use as first sentence in next pair + + # for loading samples in memory + self.current_random_doc = 0 + self.num_docs = 0 + self.sample_to_doc = [] # map sample index to doc and line + + # load samples into memory + if on_memory: + self.all_docs = [] + doc = [] + self.corpus_lines = 0 + with open(corpus_path, "r", encoding=encoding) as f: + for line in tqdm(f, desc="Loading Dataset", total=corpus_lines): + line = line.strip() + if line == "": + self.all_docs.append(doc) + doc = [] + #remove last added sample because there won't be a subsequent line anymore in the doc + self.sample_to_doc.pop() + else: + #store as one sample + sample = {"doc_id": len(self.all_docs), + "line": len(doc)} + self.sample_to_doc.append(sample) + doc.append(line) + self.corpus_lines = self.corpus_lines + 1 + + # if last row in file is not empty + if self.all_docs[-1] != doc: + self.all_docs.append(doc) + self.sample_to_doc.pop() + + self.num_docs = len(self.all_docs) + + # load samples later lazily from disk + else: + if self.corpus_lines is None: + with open(corpus_path, "r", encoding=encoding) as f: + self.corpus_lines = 0 + for line in tqdm(f, desc="Loading Dataset", total=corpus_lines): + if line.strip() == "": + self.num_docs += 1 + else: + self.corpus_lines += 1 + + # if doc does not end with empty line + if line.strip() != "": + 
self.num_docs += 1 + + self.file = open(corpus_path, "r", encoding=encoding) + self.random_file = open(corpus_path, "r", encoding=encoding) + + def __len__(self): + # last line of doc won't be used, because there's no "nextSentence". Additionally, we start counting at 0. + return self.corpus_lines - self.num_docs - 1 + + def __getitem__(self, item): + cur_id = self.sample_counter + self.sample_counter += 1 + if not self.on_memory: + # after one epoch we start again from beginning of file + if cur_id != 0 and (cur_id % len(self) == 0): + self.file.close() + self.file = open(self.corpus_path, "r", encoding=self.encoding) + + t1, t2, is_next_label = self.random_sent(item) + + # tokenize + tokens_a = self.tokenizer.tokenize(t1) + tokens_b = self.tokenizer.tokenize(t2) + + # combine to one sample + cur_example = InputExample(guid=cur_id, tokens_a=tokens_a, tokens_b=tokens_b, is_next=is_next_label) + + # transform sample to features + cur_features = convert_example_to_features(cur_example, self.seq_len, self.tokenizer) + + cur_tensors = (torch.tensor(cur_features.input_ids), + torch.tensor(cur_features.input_mask), + torch.tensor(cur_features.segment_ids), + torch.tensor(cur_features.lm_label_ids), + torch.tensor(cur_features.is_next)) + + return cur_tensors + + def random_sent(self, index): + """ + Get one sample from corpus consisting of two sentences. With prob. 50% these are two subsequent sentences + from one doc. With 50% the second sentence will be a random one from another doc. + :param index: int, index of sample. + :return: (str, str, int), sentence 1, sentence 2, isNextSentence Label + """ + t1, t2 = self.get_corpus_line(index) + if random.random() > 0.5: + label = 0 + else: + t2 = self.get_random_line() + label = 1 + + assert len(t1) > 0 + assert len(t2) > 0 + return t1, t2, label + + def get_corpus_line(self, item): + """ + Get one sample from corpus consisting of a pair of two subsequent lines from the same doc. + :param item: int, index of sample. + :return: (str, str), two subsequent sentences from corpus + """ + t1 = "" + t2 = "" + assert item < self.corpus_lines + if self.on_memory: + sample = self.sample_to_doc[item] + t1 = self.all_docs[sample["doc_id"]][sample["line"]] + t2 = self.all_docs[sample["doc_id"]][sample["line"]+1] + # used later to avoid random nextSentence from same doc + self.current_doc = sample["doc_id"] + return t1, t2 + else: + if self.line_buffer is None: + # read first non-empty line of file + while t1 == "" : + t1 = self.file.__next__().strip() + t2 = self.file.__next__().strip() + else: + # use t2 from previous iteration as new t1 + t1 = self.line_buffer + t2 = self.file.__next__().strip() + # skip empty rows that are used for separating documents and keep track of current doc id + while t2 == "" or t1 == "": + t1 = self.file.__next__().strip() + t2 = self.file.__next__().strip() + self.current_doc = self.current_doc+1 + self.line_buffer = t2 + + assert t1 != "" + assert t2 != "" + return t1, t2 + + def get_random_line(self): + """ + Get random line from another document for nextSentence task. + :return: str, content of one line + """ + # Similar to original tf repo: This outer loop should rarely go for more than one iteration for large + # corpora. However, just to be careful, we try to make sure that + # the random document is not the same as the document we're processing. 
+ for _ in range(10): + if self.on_memory: + rand_doc_idx = random.randint(0, len(self.all_docs)-1) + rand_doc = self.all_docs[rand_doc_idx] + line = rand_doc[random.randrange(len(rand_doc))] + else: + rand_index = random.randint(1, self.corpus_lines if self.corpus_lines < 1000 else 1000) + #pick random line + for _ in range(rand_index): + line = self.get_next_line() + #check if our picked random line is really from another doc like we want it to be + if self.current_random_doc != self.current_doc: + break + return line + + def get_next_line(self): + """ Gets next line of random_file and starts over when reaching end of file""" + try: + line = self.random_file.__next__().strip() + #keep track of which document we are currently looking at to later avoid having the same doc as t1 + if line == "": + self.current_random_doc = self.current_random_doc + 1 + line = self.random_file.__next__().strip() + except StopIteration: + self.random_file.close() + self.random_file = open(self.corpus_path, "r", encoding=self.encoding) + line = self.random_file.__next__().strip() + return line + + +class InputExample(object): + """A single training/test example for the language model.""" + + def __init__(self, guid, tokens_a, tokens_b=None, is_next=None, lm_labels=None): + """Constructs a InputExample. + + Args: + guid: Unique id for the example. + tokens_a: string. The untokenized text of the first sequence. For single + sequence tasks, only this sequence must be specified. + tokens_b: (Optional) string. The untokenized text of the second sequence. + Only must be specified for sequence pair tasks. + label: (Optional) string. The label of the example. This should be + specified for train and dev examples, but not for test examples. + """ + self.guid = guid + self.tokens_a = tokens_a + self.tokens_b = tokens_b + self.is_next = is_next # nextSentence + self.lm_labels = lm_labels # masked words for language model + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, input_ids, input_mask, segment_ids, is_next, lm_label_ids): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.is_next = is_next + self.lm_label_ids = lm_label_ids + + +def random_word(tokens, tokenizer): + """ + Masking some random tokens for Language Model task with probabilities as in the original BERT paper. + :param tokens: list of str, tokenized sentence. + :param tokenizer: Tokenizer, object used for tokenization (we need it's vocab here) + :return: (list of str, list of int), masked tokens and related labels for LM prediction + """ + output_label = [] + + for i, token in enumerate(tokens): + prob = random.random() + # mask token with 15% probability + if prob < 0.15: + prob /= 0.15 + + # 80% randomly change token to mask token + if prob < 0.8: + tokens[i] = "[MASK]" + + # 10% randomly change token to random token + elif prob < 0.9: + tokens[i] = random.choice(list(tokenizer.vocab.items()))[0] + + # -> rest 10% randomly keep current token + + # append current token to output (we will predict these later) + try: + output_label.append(tokenizer.vocab[token]) + except KeyError: + # For unknown words (should not occur with BPE vocab) + output_label.append(tokenizer.vocab["[UNK]"]) + logger.warning("Cannot find token '{}' in vocab. 
Using [UNK] insetad".format(token)) + else: + # no masking token (will be ignored by loss function later) + output_label.append(-1) + + return tokens, output_label + + +def convert_example_to_features(example, max_seq_length, tokenizer): + """ + Convert a raw sample (pair of sentences as tokenized strings) into a proper training sample with + IDs, LM labels, input_mask, CLS and SEP tokens etc. + :param example: InputExample, containing sentence input as strings and is_next label + :param max_seq_length: int, maximum length of sequence. + :param tokenizer: Tokenizer + :return: InputFeatures, containing all inputs and labels of one sample as IDs (as used for model training) + """ + tokens_a = example.tokens_a + tokens_b = example.tokens_b + # Modifies `tokens_a` and `tokens_b` in place so that the total + # length is less than the specified length. + # Account for [CLS], [SEP], [SEP] with "- 3" + _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) + + t1_random, t1_label = random_word(tokens_a, tokenizer) + t2_random, t2_label = random_word(tokens_b, tokenizer) + # concatenate lm labels and account for CLS, SEP, SEP + lm_label_ids = ([-1] + t1_label + [-1] + t2_label + [-1]) + + # The convention in BERT is: + # (a) For sequence pairs: + # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP] + # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 + # (b) For single sequences: + # tokens: [CLS] the dog is hairy . [SEP] + # type_ids: 0 0 0 0 0 0 0 + # + # Where "type_ids" are used to indicate whether this is the first + # sequence or the second sequence. The embedding vectors for `type=0` and + # `type=1` were learned during pre-training and are added to the wordpiece + # embedding vector (and position vector). This is not *strictly* necessary + # since the [SEP] token unambigiously separates the sequences, but it makes + # it easier for the model to learn the concept of sequences. + # + # For classification tasks, the first vector (corresponding to [CLS]) is + # used as as the "sentence vector". Note that this only makes sense because + # the entire model is fine-tuned. + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + assert len(tokens_b) > 0 + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + lm_label_ids.append(-1) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + assert len(lm_label_ids) == max_seq_length + + if example.guid < 5: + logger.info("*** Example ***") + logger.info("guid: %s" % (example.guid)) + logger.info("tokens: %s" % " ".join( + [str(x) for x in tokens])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + logger.info("LM label: %s " % (lm_label_ids)) + logger.info("Is next sentence label: %s " % (example.is_next)) + + features = InputFeatures(input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + lm_label_ids=lm_label_ids, + is_next=example.is_next) + return features + + +def main(): + parser = argparse.ArgumentParser() + + ## Required parameters + parser.add_argument("--train_file", + default=None, + type=str, + required=True, + help="The input train corpus.") + parser.add_argument("--bert_model", default=None, type=str, required=True, + help="Bert pre-trained model selected in the list: bert-base-uncased, " + "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.") + parser.add_argument("--output_dir", + default=None, + type=str, + required=True, + help="The output directory where the model checkpoints will be written.") + + ## Other parameters + parser.add_argument("--max_seq_length", + default=128, + type=int, + help="The maximum total input sequence length after WordPiece tokenization. \n" + "Sequences longer than this will be truncated, and sequences shorter \n" + "than this will be padded.") + parser.add_argument("--do_train", + action='store_true', + help="Whether to run training.") + parser.add_argument("--train_batch_size", + default=32, + type=int, + help="Total batch size for training.") + parser.add_argument("--eval_batch_size", + default=8, + type=int, + help="Total batch size for eval.") + parser.add_argument("--learning_rate", + default=3e-5, + type=float, + help="The initial learning rate for Adam.") + parser.add_argument("--num_train_epochs", + default=3.0, + type=float, + help="Total number of training epochs to perform.") + parser.add_argument("--warmup_proportion", + default=0.1, + type=float, + help="Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10%% of training.") + parser.add_argument("--no_cuda", + action='store_true', + help="Whether not to use CUDA when available") + parser.add_argument("--on_memory", + action='store_true', + help="Whether to load train samples into memory or use disk") + parser.add_argument("--do_lower_case", + action='store_true', + help="Whether to lower case the input text. 
True for uncased models, False for cased models.")
+    parser.add_argument("--local_rank",
+                        type=int,
+                        default=-1,
+                        help="local_rank for distributed training on gpus")
+    parser.add_argument('--seed',
+                        type=int,
+                        default=42,
+                        help="random seed for initialization")
+    parser.add_argument('--gradient_accumulation_steps',
+                        type=int,
+                        default=1,
+                        help="Number of updates steps to accumulate before performing a backward/update pass.")
+    parser.add_argument('--fp16',
+                        action='store_true',
+                        help="Whether to use 16-bit float precision instead of 32-bit")
+    parser.add_argument('--loss_scale',
+                        type=float, default=0,
+                        help="Loss scaling to improve fp16 numeric stability. Only used when fp16 set to True.\n"
+                             "0 (default value): dynamic loss scaling.\n"
+                             "Positive power of 2: static loss scaling value.\n")
+
+    args = parser.parse_args()
+
+    if args.local_rank == -1 or args.no_cuda:
+        device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
+        n_gpu = torch.cuda.device_count()
+    else:
+        torch.cuda.set_device(args.local_rank)
+        device = torch.device("cuda", args.local_rank)
+        n_gpu = 1
+        # Initializes the distributed backend which will take care of synchronizing nodes/GPUs
+        torch.distributed.init_process_group(backend='nccl')
+    logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format(
+        device, n_gpu, bool(args.local_rank != -1), args.fp16))
+
+    if args.gradient_accumulation_steps < 1:
+        raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format(
+                            args.gradient_accumulation_steps))
+
+    args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps)
+
+    random.seed(args.seed)
+    np.random.seed(args.seed)
+    torch.manual_seed(args.seed)
+    if n_gpu > 0:
+        torch.cuda.manual_seed_all(args.seed)
+
+    if not args.do_train:
+        raise ValueError("`do_train` must be set: this script only implements training.")
+
+    if os.path.exists(args.output_dir) and os.listdir(args.output_dir):
+        raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir))
+    os.makedirs(args.output_dir, exist_ok=True)
+
+    tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case)
+
+    #train_examples = None
+    num_train_steps = None
+    if args.do_train:
+        print("Loading Train Dataset", args.train_file)
+        train_dataset = BERTDataset(args.train_file, tokenizer, seq_len=args.max_seq_length,
+                                    corpus_lines=None, on_memory=args.on_memory)
+        num_train_steps = int(
+            len(train_dataset) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs)
+
+    # Prepare model
+    model = BertForPreTraining.from_pretrained(args.bert_model)
+    if args.fp16:
+        model.half()
+    model.to(device)
+    if args.local_rank != -1:
+        try:
+            from apex.parallel import DistributedDataParallel as DDP
+        except ImportError:
+            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.")
+        model = DDP(model)
+    elif n_gpu > 1:
+        model = torch.nn.DataParallel(model)
+
+    # Prepare optimizer
+    param_optimizer = list(model.named_parameters())
+    no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
+    optimizer_grouped_parameters = [
+        {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
+        {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
+        ]
+    if args.fp16:
+        try:
+            from apex.optimizers import
FP16_Optimizer + from apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=num_train_steps) + + global_step = 0 + if args.do_train: + logger.info("***** Running training *****") + logger.info(" Num examples = %d", len(train_dataset)) + logger.info(" Batch size = %d", args.train_batch_size) + logger.info(" Num steps = %d", num_train_steps) + + if args.local_rank == -1: + train_sampler = RandomSampler(train_dataset) + else: + #TODO: check if this works with current data generator from disk that relies on file.__next__ + # (it doesn't return item back by index) + train_sampler = DistributedSampler(train_dataset) + train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size) + + model.train() + for _ in trange(int(args.num_train_epochs), desc="Epoch"): + tr_loss = 0 + nb_tr_examples, nb_tr_steps = 0, 0 + for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): + batch = tuple(t.to(device) for t in batch) + input_ids, input_mask, segment_ids, lm_label_ids, is_next = batch + loss = model(input_ids, segment_ids, input_mask, lm_label_ids, is_next) + if n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu. + if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() + tr_loss += loss.item() + nb_tr_examples += input_ids.size(0) + nb_tr_steps += 1 + if (step + 1) % args.gradient_accumulation_steps == 0: + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_steps, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() + global_step += 1 + + # Save a trained model + logger.info("** ** * Saving fine - tuned model ** ** * ") + model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + if args.do_train: + torch.save(model_to_save.state_dict(), output_model_file) + + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. 
+ while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + + +def accuracy(out, labels): + outputs = np.argmax(out, axis=1) + return np.sum(outputs == labels) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/examples/run_squad.py b/examples/run_squad.py index e3213189bfba92..39e9c501996d5c 100644 --- a/examples/run_squad.py +++ b/examples/run_squad.py @@ -1,5 +1,6 @@ # coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,14 +39,14 @@ from pytorch_pretrained_bert.optimization import BertAdam from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE -logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', +logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', datefmt = '%m/%d/%Y %H:%M:%S', level = logging.INFO) logger = logging.getLogger(__name__) class SquadExample(object): - """A single training/test example for simple sequence classification.""" + """A single training/test example for the Squad dataset.""" def __init__(self, qas_id, @@ -107,7 +108,7 @@ def __init__(self, def read_squad_examples(input_file, is_training): """Read a SQuAD json file into a list of SquadExample.""" - with open(input_file, "r") as reader: + with open(input_file, "r", encoding='utf-8') as reader: input_data = json.load(reader)["data"] def is_whitespace(c): @@ -669,34 +670,10 @@ def _compute_softmax(scores): probs.append(score / total_sum) return probs -def copy_optimizer_params_to_model(named_params_model, named_params_optimizer): - """ Utility function for optimize_on_cpu and 16-bits training. - Copy the parameters optimized on CPU/RAM back to the model on GPU - """ - for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model): - if name_opti != name_model: - logger.error("name_opti != name_model: {} {}".format(name_opti, name_model)) - raise ValueError - param_model.data.copy_(param_opti.data) - -def set_optimizer_params_grad(named_params_optimizer, named_params_model, test_nan=False): - """ Utility function for optimize_on_cpu and 16-bits training. 
- Copy the gradient of the GPU parameters to the CPU/RAMM copy of the model - """ - is_nan = False - for (name_opti, param_opti), (name_model, param_model) in zip(named_params_optimizer, named_params_model): - if name_opti != name_model: - logger.error("name_opti != name_model: {} {}".format(name_opti, name_model)) - raise ValueError - if param_model.grad is not None: - if test_nan and torch.isnan(param_model.grad).sum() > 0: - is_nan = True - if param_opti.grad is None: - param_opti.grad = torch.nn.Parameter(param_opti.data.new().resize_(*param_opti.data.size())) - param_opti.grad.data.copy_(param_model.grad.data) - else: - param_opti.grad = None - return is_nan +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x def main(): parser = argparse.ArgumentParser() @@ -704,9 +681,10 @@ def main(): ## Required parameters parser.add_argument("--bert_model", default=None, type=str, required=True, help="Bert pre-trained model selected in the list: bert-base-uncased, " - "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.") + "bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, " + "bert-base-multilingual-cased, bert-base-chinese.") parser.add_argument("--output_dir", default=None, type=str, required=True, - help="The output directory where the model checkpoints will be written.") + help="The output directory where the model checkpoints and predictions will be written.") ## Other parameters parser.add_argument("--train_file", default=None, type=str, help="SQuAD json for training. E.g., train-v1.1.json") @@ -720,8 +698,8 @@ def main(): parser.add_argument("--max_query_length", default=64, type=int, help="The maximum number of tokens for the question. Questions longer than this will " "be truncated to this length.") - parser.add_argument("--do_train", default=False, action='store_true', help="Whether to run training.") - parser.add_argument("--do_predict", default=False, action='store_true', help="Whether to run eval on the dev set.") + parser.add_argument("--do_train", action='store_true', help="Whether to run training.") + parser.add_argument("--do_predict", action='store_true', help="Whether to run eval on the dev set.") parser.add_argument("--train_batch_size", default=32, type=int, help="Total batch size for training.") parser.add_argument("--predict_batch_size", default=8, type=int, help="Total batch size for predictions.") parser.add_argument("--learning_rate", default=5e-5, type=float, help="The initial learning rate for Adam.") @@ -736,15 +714,14 @@ def main(): parser.add_argument("--max_answer_length", default=30, type=int, help="The maximum length of an answer that can be generated. This is needed because the start " "and end predictions are not conditioned on one another.") - parser.add_argument("--verbose_logging", default=False, action='store_true', + parser.add_argument("--verbose_logging", action='store_true', help="If true, all of the warnings related to data processing will be printed. 
" "A number of warnings are expected for a normal SQuAD evaluation.") parser.add_argument("--no_cuda", - default=False, action='store_true', help="Whether not to use CUDA when available") - parser.add_argument('--seed', - type=int, + parser.add_argument('--seed', + type=int, default=42, help="random seed for initialization") parser.add_argument('--gradient_accumulation_steps', @@ -752,24 +729,20 @@ def main(): default=1, help="Number of updates steps to accumulate before performing a backward/update pass.") parser.add_argument("--do_lower_case", - default=True, action='store_true', help="Whether to lower case the input text. True for uncased models, False for cased models.") parser.add_argument("--local_rank", type=int, default=-1, help="local_rank for distributed training on gpus") - parser.add_argument('--optimize_on_cpu', - default=False, - action='store_true', - help="Whether to perform optimization and keep the optimizer averages on CPU") parser.add_argument('--fp16', - default=False, action='store_true', help="Whether to use 16-bit float precision instead of 32-bit") parser.add_argument('--loss_scale', - type=float, default=128, - help='Loss scaling, positive power of 2 values can improve fp16 convergence.') + type=float, default=0, + help="Loss scaling to improve fp16 numeric stability. Only used when fp16 set to True.\n" + "0 (default value): dynamic loss scaling.\n" + "Positive power of 2: static loss scaling value.\n") args = parser.parse_args() @@ -777,14 +750,12 @@ def main(): device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") n_gpu = torch.cuda.device_count() else: + torch.cuda.set_device(args.local_rank) device = torch.device("cuda", args.local_rank) n_gpu = 1 # Initializes the distributed backend which will take care of sychronizing nodes/GPUs torch.distributed.init_process_group(backend='nccl') - if args.fp16: - logger.info("16-bits training currently not supported in distributed training") - args.fp16 = False # (see https://github.com/pytorch/pytorch/pull/13496) - logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits trainiing: {}".format( + logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format( device, n_gpu, bool(args.local_rank != -1), args.fp16)) if args.gradient_accumulation_steps < 1: @@ -811,11 +782,11 @@ def main(): raise ValueError( "If `do_predict` is True, then `predict_file` must be specified.") - if os.path.exists(args.output_dir) and os.listdir(args.output_dir): + if os.path.exists(args.output_dir) and os.listdir(args.output_dir) and args.do_train: raise ValueError("Output directory () already exists and is not empty.") os.makedirs(args.output_dir, exist_ok=True) - tokenizer = BertTokenizer.from_pretrained(args.bert_model) + tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case) train_examples = None num_train_steps = None @@ -828,41 +799,61 @@ def main(): # Prepare model model = BertForQuestionAnswering.from_pretrained(args.bert_model, cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank)) + if args.fp16: model.half() model.to(device) if args.local_rank != -1: - model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank], - output_device=args.local_rank) + try: + from apex.parallel import DistributedDataParallel as DDP + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + model = 
DDP(model) elif n_gpu > 1: model = torch.nn.DataParallel(model) # Prepare optimizer - if args.fp16: - param_optimizer = [(n, param.clone().detach().to('cpu').float().requires_grad_()) \ - for n, param in model.named_parameters()] - elif args.optimize_on_cpu: - param_optimizer = [(n, param.clone().detach().to('cpu').requires_grad_()) \ - for n, param in model.named_parameters()] - else: - param_optimizer = list(model.named_parameters()) - no_decay = ['bias', 'gamma', 'beta'] + param_optimizer = list(model.named_parameters()) + + # hack to remove pooler, which is not used + # thus it produce None grad that break apex + param_optimizer = [n for n in param_optimizer if 'pooler' not in n[0]] + + no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] optimizer_grouped_parameters = [ - {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay_rate': 0.01}, - {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay_rate': 0.0} + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} ] + t_total = num_train_steps if args.local_rank != -1: t_total = t_total // torch.distributed.get_world_size() - optimizer = BertAdam(optimizer_grouped_parameters, - lr=args.learning_rate, - warmup=args.warmup_proportion, - t_total=t_total) + if args.fp16: + try: + from apex.optimizers import FP16_Optimizer + from apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=t_total) global_step = 0 if args.do_train: cached_train_features_file = args.train_file+'_{0}_{1}_{2}_{3}'.format( - args.bert_model, str(args.max_seq_length), str(args.doc_stride), str(args.max_query_length)) + list(filter(None, args.bert_model.split('/'))).pop(), str(args.max_seq_length), str(args.doc_stride), str(args.max_query_length)) train_features = None try: with open(cached_train_features_file, "rb") as reader: @@ -878,7 +869,7 @@ def main(): if args.local_rank == -1 or torch.distributed.get_rank() == 0: logger.info(" Saving train features into cached file %s", cached_train_features_file) with open(cached_train_features_file, "wb") as writer: - train_features = pickle.dump(train_features, writer) + pickle.dump(train_features, writer) logger.info("***** Running training *****") logger.info(" Num orig examples = %d", len(train_examples)) logger.info(" Num split examples = %d", len(train_features)) @@ -906,33 +897,33 @@ def main(): loss = model(input_ids, segment_ids, input_mask, start_positions, end_positions) if n_gpu > 1: loss = loss.mean() # mean() to average on multi-gpu. 
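# Illustrative sketch, not part of this patch: the learning-rate schedule produced by the
# warmup_linear() helper this patch adds, mirrored here so the snippet runs on its own.
# The base learning rate, warmup proportion of 0.1, and 1000-step budget are hypothetical.
def warmup_linear(x, warmup=0.002):
    if x < warmup:
        return x / warmup
    return 1.0 - x

base_lr, t_total = 3e-5, 1000
for step in (0, 50, 100, 500, 999):
    # ramps linearly up to base_lr over the first 10% of steps, then decays linearly toward 0
    print(step, base_lr * warmup_linear(step / t_total, warmup=0.1))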
- if args.fp16 and args.loss_scale != 1.0: - # rescale loss for fp16 training - # see https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html - loss = loss * args.loss_scale if args.gradient_accumulation_steps > 1: loss = loss / args.gradient_accumulation_steps - loss.backward() + + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() if (step + 1) % args.gradient_accumulation_steps == 0: - if args.fp16 or args.optimize_on_cpu: - if args.fp16 and args.loss_scale != 1.0: - # scale down gradients for fp16 training - for param in model.parameters(): - if param.grad is not None: - param.grad.data = param.grad.data / args.loss_scale - is_nan = set_optimizer_params_grad(param_optimizer, model.named_parameters(), test_nan=True) - if is_nan: - logger.info("FP16 TRAINING: Nan in gradients, reducing loss scaling") - args.loss_scale = args.loss_scale / 2 - model.zero_grad() - continue - optimizer.step() - copy_optimizer_params_to_model(model.named_parameters(), param_optimizer) - else: - optimizer.step() - model.zero_grad() + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() global_step += 1 + # Save a trained model + model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + if args.do_train: + torch.save(model_to_save.state_dict(), output_model_file) + + # Load a trained model that you have fine-tuned + model_state_dict = torch.load(output_model_file) + model = BertForQuestionAnswering.from_pretrained(args.bert_model, state_dict=model_state_dict) + model.to(device) + if args.do_predict and (args.local_rank == -1 or torch.distributed.get_rank() == 0): eval_examples = read_squad_examples( input_file=args.predict_file, is_training=False) diff --git a/examples/run_squad2.py b/examples/run_squad2.py new file mode 100644 index 00000000000000..558b24764e87f7 --- /dev/null +++ b/examples/run_squad2.py @@ -0,0 +1,1075 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
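# Editor's note (illustrative, not part of the patch): the new run_squad2.py below reads the
# SQuAD 2.0 JSON layout, where the top-level "version" is "v2.0" and unanswerable questions
# carry "is_impossible": true with an empty answer list. A minimal example entry:
#
#   {"version": "v2.0",
#    "data": [{"paragraphs": [{"context": "Normandy is a region in France.",
#                              "qas": [{"id": "q1", "question": "Where is Normandy?",
#                                       "is_impossible": false,
#                                       "answers": [{"text": "France", "answer_start": 24}]},
#                                      {"id": "q2", "question": "Who rules Normandy today?",
#                                       "is_impossible": true, "answers": []}]}]}]}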
+"""Run BERT on SQuAD 2.0""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import collections +import logging +import json +import math +import os +import random +import pickle +from tqdm import tqdm, trange + +import numpy as np +import torch +from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler +from torch.utils.data.distributed import DistributedSampler + +from pytorch_pretrained_bert.tokenization import whitespace_tokenize, BasicTokenizer, BertTokenizer +from pytorch_pretrained_bert.modeling import BertForQuestionAnswering +from pytorch_pretrained_bert.optimization import BertAdam +from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE + +logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt = '%m/%d/%Y %H:%M:%S', + level = logging.INFO) +logger = logging.getLogger(__name__) + + +class SquadExample(object): + """ + A single training/test example for the Squad dataset. + For examples without an answer, the start and end position are -1. + """ + + def __init__(self, + qas_id, + question_text, + doc_tokens, + orig_answer_text=None, + start_position=None, + end_position=None, + is_impossible=None): + self.qas_id = qas_id + self.question_text = question_text + self.doc_tokens = doc_tokens + self.orig_answer_text = orig_answer_text + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + def __str__(self): + return self.__repr__() + + def __repr__(self): + s = "" + s += "qas_id: %s" % (self.qas_id) + s += ", question_text: %s" % ( + self.question_text) + s += ", doc_tokens: [%s]" % (" ".join(self.doc_tokens)) + if self.start_position: + s += ", start_position: %d" % (self.start_position) + if self.start_position: + s += ", end_position: %d" % (self.end_position) + if self.start_position: + s += ", is_impossible: %r" % (self.is_impossible) + return s + + +class InputFeatures(object): + """A single set of features of data.""" + + def __init__(self, + unique_id, + example_index, + doc_span_index, + tokens, + token_to_orig_map, + token_is_max_context, + input_ids, + input_mask, + segment_ids, + start_position=None, + end_position=None, + is_impossible=None): + self.unique_id = unique_id + self.example_index = example_index + self.doc_span_index = doc_span_index + self.tokens = tokens + self.token_to_orig_map = token_to_orig_map + self.token_is_max_context = token_is_max_context + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.start_position = start_position + self.end_position = end_position + self.is_impossible = is_impossible + + +def read_squad_examples(input_file, is_training): + """Read a SQuAD json file into a list of SquadExample.""" + with open(input_file, "r", encoding='utf-8') as reader: + source = json.load(reader) + input_data = source["data"] + version = source["version"] + + def is_whitespace(c): + if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F: + return True + return False + + examples = [] + for entry in input_data: + for paragraph in entry["paragraphs"]: + paragraph_text = paragraph["context"] + doc_tokens = [] + char_to_word_offset = [] + prev_is_whitespace = True + for c in paragraph_text: + if is_whitespace(c): + prev_is_whitespace = True + else: + if prev_is_whitespace: + doc_tokens.append(c) + else: + doc_tokens[-1] += c + prev_is_whitespace = False + 
char_to_word_offset.append(len(doc_tokens) - 1) + + for qa in paragraph["qas"]: + qas_id = qa["id"] + question_text = qa["question"] + start_position = None + end_position = None + orig_answer_text = None + is_impossible = False + if is_training: + if version == "v2.0": + is_impossible = qa["is_impossible"] + if (len(qa["answers"]) != 1) and (not is_impossible): + raise ValueError( + "For training, each question should have exactly 1 answer.") + if not is_impossible: + answer = qa["answers"][0] + orig_answer_text = answer["text"] + answer_offset = answer["answer_start"] + answer_length = len(orig_answer_text) + start_position = char_to_word_offset[answer_offset] + end_position = char_to_word_offset[answer_offset + answer_length - 1] + # Only add answers where the text can be exactly recovered from the + # document. If this CAN'T happen it's likely due to weird Unicode + # stuff so we will just skip the example. + # + # Note that this means for training mode, every example is NOT + # guaranteed to be preserved. + actual_text = " ".join(doc_tokens[start_position:(end_position + 1)]) + cleaned_answer_text = " ".join( + whitespace_tokenize(orig_answer_text)) + if actual_text.find(cleaned_answer_text) == -1: + logger.warning("Could not find answer: '%s' vs. '%s'", + actual_text, cleaned_answer_text) + continue + else: + start_position = -1 + end_position = -1 + orig_answer_text = "" + + example = SquadExample( + qas_id=qas_id, + question_text=question_text, + doc_tokens=doc_tokens, + orig_answer_text=orig_answer_text, + start_position=start_position, + end_position=end_position, + is_impossible=is_impossible) + examples.append(example) + return examples + + +def convert_examples_to_features(examples, tokenizer, max_seq_length, + doc_stride, max_query_length, is_training): + """Loads a data file into a list of `InputBatch`s.""" + + unique_id = 1000000000 + + features = [] + for (example_index, example) in enumerate(examples): + query_tokens = tokenizer.tokenize(example.question_text) + + if len(query_tokens) > max_query_length: + query_tokens = query_tokens[0:max_query_length] + + tok_to_orig_index = [] + orig_to_tok_index = [] + all_doc_tokens = [] + for (i, token) in enumerate(example.doc_tokens): + orig_to_tok_index.append(len(all_doc_tokens)) + sub_tokens = tokenizer.tokenize(token) + for sub_token in sub_tokens: + tok_to_orig_index.append(i) + all_doc_tokens.append(sub_token) + + tok_start_position = None + tok_end_position = None + if is_training and example.is_impossible: + tok_start_position = -1 + tok_end_position = -1 + if is_training and not example.is_impossible: + tok_start_position = orig_to_tok_index[example.start_position] + if example.end_position < len(example.doc_tokens) - 1: + tok_end_position = orig_to_tok_index[example.end_position + 1] - 1 + else: + tok_end_position = len(all_doc_tokens) - 1 + (tok_start_position, tok_end_position) = _improve_answer_span( + all_doc_tokens, tok_start_position, tok_end_position, tokenizer, + example.orig_answer_text) + + # The -3 accounts for [CLS], [SEP] and [SEP] + max_tokens_for_doc = max_seq_length - len(query_tokens) - 3 + + # We can have documents that are longer than the maximum sequence length. + # To deal with this we do a sliding window approach, where we take chunks + # of the up to our max length with a stride of `doc_stride`. 
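# Editor's note (illustrative, not part of the patch): the loop below produces overlapping
# (start, length) windows over the document. For example, with 10 WordPiece tokens,
# max_tokens_for_doc = 6 and doc_stride = 3 it yields the spans (0, 6), (3, 6), (6, 4):
# every token lands in at least one span, and tokens near a span boundary reappear with
# more surrounding context in the next span (resolved later by _check_is_max_context).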
+ _DocSpan = collections.namedtuple( # pylint: disable=invalid-name + "DocSpan", ["start", "length"]) + doc_spans = [] + start_offset = 0 + while start_offset < len(all_doc_tokens): + length = len(all_doc_tokens) - start_offset + if length > max_tokens_for_doc: + length = max_tokens_for_doc + doc_spans.append(_DocSpan(start=start_offset, length=length)) + if start_offset + length == len(all_doc_tokens): + break + start_offset += min(length, doc_stride) + + for (doc_span_index, doc_span) in enumerate(doc_spans): + tokens = [] + token_to_orig_map = {} + token_is_max_context = {} + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in query_tokens: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + for i in range(doc_span.length): + split_token_index = doc_span.start + i + token_to_orig_map[len(tokens)] = tok_to_orig_index[split_token_index] + + is_max_context = _check_is_max_context(doc_spans, doc_span_index, + split_token_index) + token_is_max_context[len(tokens)] = is_max_context + tokens.append(all_doc_tokens[split_token_index]) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. + while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + start_position = None + end_position = None + if is_training and not example.is_impossible: + # For training, if our document chunk does not contain an annotation + # we throw it out, since there is nothing to predict. 
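# Editor's note (illustrative, not part of the patch): in this version the chunk is not
# literally discarded -- when the answer falls outside the span, start/end are pointed at
# position 0 (the [CLS] token), the same convention used for impossible questions.
# Otherwise the answer indices are shifted into the packed sequence: with doc_offset =
# len(query_tokens) + 2 (accounting for [CLS], the query and the first [SEP]), a document
# token at tok_start_position maps to tok_start_position - doc_start + doc_offset.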
+ doc_start = doc_span.start + doc_end = doc_span.start + doc_span.length - 1 + out_of_span = False + if (example.start_position < doc_start or + example.end_position < doc_start or + example.start_position > doc_end or example.end_position > doc_end): + out_of_span = True + if out_of_span: + start_position = 0 + end_position = 0 + else: + doc_offset = len(query_tokens) + 2 + start_position = tok_start_position - doc_start + doc_offset + end_position = tok_end_position - doc_start + doc_offset + + if is_training and example.is_impossible: + start_position = 0 + end_position = 0 + + if example_index < 20: + logger.info("*** Example ***") + logger.info("unique_id: %s" % (unique_id)) + logger.info("example_index: %s" % (example_index)) + logger.info("doc_span_index: %s" % (doc_span_index)) + logger.info("tokens: %s" % " ".join(tokens)) + logger.info("token_to_orig_map: %s" % " ".join([ + "%d:%d" % (x, y) for (x, y) in token_to_orig_map.items()])) + logger.info("token_is_max_context: %s" % " ".join([ + "%d:%s" % (x, y) for (x, y) in token_is_max_context.items() + ])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info( + "input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + if is_training and example.is_impossible: + logger.info("impossible example") + if is_training and not example.is_impossible: + answer_text = " ".join(tokens[start_position:(end_position + 1)]) + logger.info("start_position: %d" % (start_position)) + logger.info("end_position: %d" % (end_position)) + logger.info( + "answer: %s" % (answer_text)) + + features.append( + InputFeatures( + unique_id=unique_id, + example_index=example_index, + doc_span_index=doc_span_index, + tokens=tokens, + token_to_orig_map=token_to_orig_map, + token_is_max_context=token_is_max_context, + input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + start_position=start_position, + end_position=end_position, + is_impossible=example.is_impossible)) + unique_id += 1 + + return features + + +def _improve_answer_span(doc_tokens, input_start, input_end, tokenizer, + orig_answer_text): + """Returns tokenized answer spans that better match the annotated answer.""" + + # The SQuAD annotations are character based. We first project them to + # whitespace-tokenized words. But then after WordPiece tokenization, we can + # often find a "better match". For example: + # + # Question: What year was John Smith born? + # Context: The leader was John Smith (1895-1943). + # Answer: 1895 + # + # The original whitespace-tokenized answer will be "(1895-1943).". However + # after tokenization, our tokens will be "( 1895 - 1943 ) .". So we can match + # the exact answer, 1895. + # + # However, this is not always possible. Consider the following: + # + # Question: What country is the top exporter of electornics? + # Context: The Japanese electronics industry is the lagest in the world. + # Answer: Japan + # + # In this case, the annotator chose "Japan" as a character sub-span of + # the word "Japanese". Since our WordPiece tokenizer does not split + # "Japanese", we just use "Japanese" as the annotation. This is fairly rare + # in SQuAD, but does happen. 
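# Editor's note (illustrative, not part of the patch): the nested loops below simply try every
# sub-span of the whitespace-aligned window and return the first one whose re-joined WordPiece
# text equals the tokenized answer. For the "(1895-1943)." example above, the window becomes
# ["(", "1895", "-", "1943", ")", "."] after tokenization and the search narrows the span down
# to the single "1895" token; if no sub-span matches, the original span is kept unchanged.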
+ tok_answer_text = " ".join(tokenizer.tokenize(orig_answer_text)) + + for new_start in range(input_start, input_end + 1): + for new_end in range(input_end, new_start - 1, -1): + text_span = " ".join(doc_tokens[new_start:(new_end + 1)]) + if text_span == tok_answer_text: + return (new_start, new_end) + + return (input_start, input_end) + + +def _check_is_max_context(doc_spans, cur_span_index, position): + """Check if this is the 'max context' doc span for the token.""" + + # Because of the sliding window approach taken to scoring documents, a single + # token can appear in multiple documents. E.g. + # Doc: the man went to the store and bought a gallon of milk + # Span A: the man went to the + # Span B: to the store and bought + # Span C: and bought a gallon of + # ... + # + # Now the word 'bought' will have two scores from spans B and C. We only + # want to consider the score with "maximum context", which we define as + # the *minimum* of its left and right context (the *sum* of left and + # right context will always be the same, of course). + # + # In the example the maximum context for 'bought' would be span C since + # it has 1 left context and 3 right context, while span B has 4 left context + # and 0 right context. + best_score = None + best_span_index = None + for (span_index, doc_span) in enumerate(doc_spans): + end = doc_span.start + doc_span.length - 1 + if position < doc_span.start: + continue + if position > end: + continue + num_left_context = position - doc_span.start + num_right_context = end - position + score = min(num_left_context, num_right_context) + 0.01 * doc_span.length + if best_score is None or score > best_score: + best_score = score + best_span_index = span_index + + return cur_span_index == best_span_index + + + +RawResult = collections.namedtuple("RawResult", + ["unique_id", "start_logits", "end_logits"]) + + +def write_predictions(all_examples, all_features, all_results, n_best_size, + max_answer_length, do_lower_case, output_prediction_file, + output_nbest_file, output_null_log_odds_file, verbose_logging, is_version2, null_score_diff_threshold): + """Write final predictions to the json file and log-odds of null if needed.""" + logger.info("Writing predictions to: %s" % (output_prediction_file)) + logger.info("Writing nbest to: %s" % (output_nbest_file)) + + example_index_to_features = collections.defaultdict(list) + for feature in all_features: + example_index_to_features[feature.example_index].append(feature) + + unique_id_to_result = {} + for result in all_results: + unique_id_to_result[result.unique_id] = result + + _PrelimPrediction = collections.namedtuple( # pylint: disable=invalid-name + "PrelimPrediction", + ["feature_index", "start_index", "end_index", "start_logit", "end_logit"]) + + all_predictions = collections.OrderedDict() + all_nbest_json = collections.OrderedDict() + scores_diff_json = collections.OrderedDict() + + for (example_index, example) in enumerate(all_examples): + features = example_index_to_features[example_index] + + prelim_predictions = [] + # keep track of the minimum score of null start+end of position 0 + score_null = 1000000 # large and positive + min_null_feature_index = 0 # the paragraph slice with min mull score + null_start_logit = 0 # the start logit at the slice with min null score + null_end_logit = 0 # the end logit at the slice with min null score + + for (feature_index, feature) in enumerate(features): + result = unique_id_to_result[feature.unique_id] + start_indexes = _get_best_indexes(result.start_logits, 
n_best_size) + end_indexes = _get_best_indexes(result.end_logits, n_best_size) + # if we could have irrelevant answers, get the min score of irrelevant + if is_version2: + feature_null_score = result.start_logits[0] + result.end_logits[0] + if feature_null_score < score_null: + score_null = feature_null_score + min_null_feature_index = feature_index + null_start_logit = result.start_logits[0] + null_end_logit = result.end_logits[0] + + for start_index in start_indexes: + for end_index in end_indexes: + # We could hypothetically create invalid predictions, e.g., predict + # that the start of the span is in the question. We throw out all + # invalid predictions. + if start_index >= len(feature.tokens): + continue + if end_index >= len(feature.tokens): + continue + if start_index not in feature.token_to_orig_map: + continue + if end_index not in feature.token_to_orig_map: + continue + if not feature.token_is_max_context.get(start_index, False): + continue + if end_index < start_index: + continue + length = end_index - start_index + 1 + if length > max_answer_length: + continue + prelim_predictions.append( + _PrelimPrediction( + feature_index=feature_index, + start_index=start_index, + end_index=end_index, + start_logit=result.start_logits[start_index], + end_logit=result.end_logits[end_index])) + + if is_version2: + prelim_predictions.append( + _PrelimPrediction( + feature_index=min_null_feature_index, + start_index=0, + end_index=0, + start_logit=null_start_logit, + end_logit=null_end_logit)) + + prelim_predictions = sorted( + prelim_predictions, + key=lambda x: (x.start_logit + x.end_logit), + reverse=True) + + _NbestPrediction = collections.namedtuple( # pylint: disable=invalid-name + "NbestPrediction", ["text", "start_logit", "end_logit"]) + + seen_predictions = {} + nbest = [] + for pred in prelim_predictions: + if len(nbest) >= n_best_size: + break + feature = features[pred.feature_index] + if pred.start_index > 0: + tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)] + orig_doc_start = feature.token_to_orig_map[pred.start_index] + orig_doc_end = feature.token_to_orig_map[pred.end_index] + orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)] + tok_text = " ".join(tok_tokens) + + # De-tokenize WordPieces that have been split off. + tok_text = tok_text.replace(" ##", "") + tok_text = tok_text.replace("##", "") + + # Clean whitespace + tok_text = tok_text.strip() + tok_text = " ".join(tok_text.split()) + orig_text = " ".join(orig_tokens) + + final_text = get_final_text(tok_text, orig_text, do_lower_case, verbose_logging) + if final_text in seen_predictions: + continue + seen_predictions[final_text] = True + else: + final_text = "" + seen_predictions[final_text] = True + + nbest.append( + _NbestPrediction( + text=final_text, + start_logit=pred.start_logit, + end_logit=pred.end_logit)) + + # if we didn't inlude the empty option in the n-best, inlcude it + if is_version2: + if "" not in seen_predictions: + nbest.append( + _NbestPrediction( + text="", start_logit=null_start_logit, + end_logit=null_end_logit)) + + # In very rare edge cases we could have no valid predictions. So we + # just create a nonce prediction in this case to avoid failure. 
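# Editor's note (illustrative, not part of the patch): this can happen, for instance, when every
# candidate start/end pair was filtered out above (indices outside the passage, starts that are
# not the max-context occurrence, or spans longer than max_answer_length), leaving nbest empty.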
+ if not nbest: + nbest.append( + _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0)) + + assert len(nbest) >= 1 + + total_scores = [] + best_non_null_entry = None + for entry in nbest: + total_scores.append(entry.start_logit + entry.end_logit) + if not best_non_null_entry: + if entry.text: + best_non_null_entry = entry + + probs = _compute_softmax(total_scores) + + nbest_json = [] + for (i, entry) in enumerate(nbest): + output = collections.OrderedDict() + output["text"] = entry.text + output["probability"] = probs[i] + output["start_logit"] = entry.start_logit + output["end_logit"] = entry.end_logit + nbest_json.append(output) + + assert len(nbest_json) >= 1 + + + if not is_version2: + all_predictions[example.qas_id] = nbest_json[0]["text"] + else: + # predict "" iff the null score - the score of best non-null > threshold + score_diff = score_null - best_non_null_entry.start_logit - ( + best_non_null_entry.end_logit) + scores_diff_json[example.qas_id] = score_diff + if score_diff > null_score_diff_threshold: + all_predictions[example.qas_id] = "" + else: + all_predictions[example.qas_id] = best_non_null_entry.text + all_nbest_json[example.qas_id] = nbest_json + + with open(output_prediction_file, "w") as writer: + writer.write(json.dumps(all_predictions, indent=4) + "\n") + + with open(output_nbest_file, "w") as writer: + writer.write(json.dumps(all_nbest_json, indent=4) + "\n") + + if is_version2: + with open(output_null_log_odds_file, "w") as writer: + writer.write(json.dumps(scores_diff_json, indent=4) + "\n") + + +def get_final_text(pred_text, orig_text, do_lower_case, verbose_logging=False): + """Project the tokenized prediction back to the original text.""" + + # When we created the data, we kept track of the alignment between original + # (whitespace tokenized) tokens and our WordPiece tokenized tokens. So + # now `orig_text` contains the span of our original text corresponding to the + # span that we predicted. + # + # However, `orig_text` may contain extra characters that we don't want in + # our prediction. + # + # For example, let's say: + # pred_text = steve smith + # orig_text = Steve Smith's + # + # We don't want to return `orig_text` because it contains the extra "'s". + # + # We don't want to return `pred_text` because it's already been normalized + # (the SQuAD eval script also does punctuation stripping/lower casing but + # our tokenizer does additional normalization like stripping accent + # characters). + # + # What we really want to return is "Steve Smith". + # + # Therefore, we have to apply a semi-complicated alignment heruistic between + # `pred_text` and `orig_text` to get a character-to-charcter alignment. This + # can fail in certain cases in which case we just return `orig_text`. + + def _strip_spaces(text): + ns_chars = [] + ns_to_s_map = collections.OrderedDict() + for (i, c) in enumerate(text): + if c == " ": + continue + ns_to_s_map[len(ns_chars)] = i + ns_chars.append(c) + ns_text = "".join(ns_chars) + return (ns_text, ns_to_s_map) + + # We first tokenize `orig_text`, strip whitespace from the result + # and `pred_text`, and check if they are the same length. If they are + # NOT the same length, the heuristic has failed. If they are the same + # length, we assume the characters are one-to-one aligned. 
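# Editor's note (illustrative, not part of the patch): for the example above, tokenizing
# orig_text = "Steve Smith's" with do_lower_case=True gives tok_text = "steve smith ' s";
# both strings have 12 characters once spaces are stripped, so the heuristic accepts them,
# maps the match for pred_text = "steve smith" back through the two offset maps, and returns
# "Steve Smith" without the trailing "'s".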
+ tokenizer = BasicTokenizer(do_lower_case=do_lower_case) + + tok_text = " ".join(tokenizer.tokenize(orig_text)) + + start_position = tok_text.find(pred_text) + if start_position == -1: + if verbose_logging: + logger.info( + "Unable to find text: '%s' in '%s'" % (pred_text, orig_text)) + return orig_text + end_position = start_position + len(pred_text) - 1 + + (orig_ns_text, orig_ns_to_s_map) = _strip_spaces(orig_text) + (tok_ns_text, tok_ns_to_s_map) = _strip_spaces(tok_text) + + if len(orig_ns_text) != len(tok_ns_text): + if verbose_logging: + logger.info("Length not equal after stripping spaces: '%s' vs '%s'", + orig_ns_text, tok_ns_text) + return orig_text + + # We then project the characters in `pred_text` back to `orig_text` using + # the character-to-character alignment. + tok_s_to_ns_map = {} + for (i, tok_index) in tok_ns_to_s_map.items(): + tok_s_to_ns_map[tok_index] = i + + orig_start_position = None + if start_position in tok_s_to_ns_map: + ns_start_position = tok_s_to_ns_map[start_position] + if ns_start_position in orig_ns_to_s_map: + orig_start_position = orig_ns_to_s_map[ns_start_position] + + if orig_start_position is None: + if verbose_logging: + logger.info("Couldn't map start position") + return orig_text + + orig_end_position = None + if end_position in tok_s_to_ns_map: + ns_end_position = tok_s_to_ns_map[end_position] + if ns_end_position in orig_ns_to_s_map: + orig_end_position = orig_ns_to_s_map[ns_end_position] + + if orig_end_position is None: + if verbose_logging: + logger.info("Couldn't map end position") + return orig_text + + output_text = orig_text[orig_start_position:(orig_end_position + 1)] + return output_text + + +def _get_best_indexes(logits, n_best_size): + """Get the n-best logits from a list.""" + index_and_score = sorted(enumerate(logits), key=lambda x: x[1], reverse=True) + + best_indexes = [] + for i in range(len(index_and_score)): + if i >= n_best_size: + break + best_indexes.append(index_and_score[i][0]) + return best_indexes + + +def _compute_softmax(scores): + """Compute softmax probability over raw logits.""" + if not scores: + return [] + + max_score = None + for score in scores: + if max_score is None or score > max_score: + max_score = score + + exp_scores = [] + total_sum = 0.0 + for score in scores: + x = math.exp(score - max_score) + exp_scores.append(x) + total_sum += x + + probs = [] + for score in exp_scores: + probs.append(score / total_sum) + return probs + +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x + +def main(): + parser = argparse.ArgumentParser() + + ## Required parameters + parser.add_argument("--bert_model", default=None, type=str, required=True, + help="Bert pre-trained model selected in the list: bert-base-uncased, " + "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.") + parser.add_argument("--output_dir", default=None, type=str, required=True, + help="The output directory where the model checkpoints and predictions will be written.") + + ## Other parameters + parser.add_argument("--train_file", default=None, type=str, help="SQuAD json for training. E.g., train-v1.1.json") + parser.add_argument("--predict_file", default=None, type=str, + help="SQuAD json for predictions. E.g., dev-v1.1.json or test-v1.1.json") + parser.add_argument("--max_seq_length", default=384, type=int, + help="The maximum total input sequence length after WordPiece tokenization. 
Sequences " + "longer than this will be truncated, and sequences shorter than this will be padded.") + parser.add_argument("--doc_stride", default=128, type=int, + help="When splitting up a long document into chunks, how much stride to take between chunks.") + parser.add_argument("--max_query_length", default=64, type=int, + help="The maximum number of tokens for the question. Questions longer than this will " + "be truncated to this length.") + parser.add_argument("--do_train", default=False, action='store_true', help="Whether to run training.") + parser.add_argument("--do_predict", default=False, action='store_true', help="Whether to run eval on the dev set.") + parser.add_argument("--train_batch_size", default=32, type=int, help="Total batch size for training.") + parser.add_argument("--predict_batch_size", default=8, type=int, help="Total batch size for predictions.") + parser.add_argument("--learning_rate", default=5e-5, type=float, help="The initial learning rate for Adam.") + parser.add_argument("--num_train_epochs", default=3.0, type=float, + help="Total number of training epochs to perform.") + parser.add_argument("--warmup_proportion", default=0.1, type=float, + help="Proportion of training to perform linear learning rate warmup for. E.g., 0.1 = 10% " + "of training.") + parser.add_argument("--n_best_size", default=20, type=int, + help="The total number of n-best predictions to generate in the nbest_predictions.json " + "output file.") + parser.add_argument("--max_answer_length", default=30, type=int, + help="The maximum length of an answer that can be generated. This is needed because the start " + "and end predictions are not conditioned on one another.") + parser.add_argument("--verbose_logging", default=False, action='store_true', + help="If true, all of the warnings related to data processing will be printed. " + "A number of warnings are expected for a normal SQuAD evaluation.") + parser.add_argument("--no_cuda", + default=False, + action='store_true', + help="Whether not to use CUDA when available") + parser.add_argument('--seed', + type=int, + default=42, + help="random seed for initialization") + parser.add_argument('--gradient_accumulation_steps', + type=int, + default=1, + help="Number of updates steps to accumulate before performing a backward/update pass.") + parser.add_argument("--do_lower_case", + action='store_true', + help="Whether to lower case the input text. True for uncased models, False for cased models.") + parser.add_argument("--local_rank", + type=int, + default=-1, + help="local_rank for distributed training on gpus") + parser.add_argument('--fp16', + default=False, + action='store_true', + help="Whether to use 16-bit float precision instead of 32-bit") + parser.add_argument('--loss_scale', + type=float, default=0, + help="Loss scaling to improve fp16 numeric stability. 
Only used when fp16 set to True.\n" + "0 (default value): dynamic loss scaling.\n" + "Positive power of 2: static loss scaling value.\n") + parser.add_argument('--null_score_diff_threshold', + type=float, default=0.0, + help="If null_score - best_non_null is greater than the threshold predict null.") + + args = parser.parse_args() + + if args.local_rank == -1 or args.no_cuda: + device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") + n_gpu = torch.cuda.device_count() + else: + torch.cuda.set_device(args.local_rank) + device = torch.device("cuda", args.local_rank) + n_gpu = 1 + # Initializes the distributed backend which will take care of sychronizing nodes/GPUs + torch.distributed.init_process_group(backend='nccl') + logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format( + device, n_gpu, bool(args.local_rank != -1), args.fp16)) + + if args.gradient_accumulation_steps < 1: + raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format( + args.gradient_accumulation_steps)) + + args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + if n_gpu > 0: + torch.cuda.manual_seed_all(args.seed) + + if not args.do_train and not args.do_predict: + raise ValueError("At least one of `do_train` or `do_predict` must be True.") + + if args.do_train: + if not args.train_file: + raise ValueError( + "If `do_train` is True, then `train_file` must be specified.") + if args.do_predict: + if not args.predict_file: + raise ValueError( + "If `do_predict` is True, then `predict_file` must be specified.") + + if os.path.exists(args.output_dir) and os.listdir(args.output_dir): + raise ValueError("Output directory () already exists and is not empty.") + os.makedirs(args.output_dir, exist_ok=True) + + tokenizer = BertTokenizer.from_pretrained(args.bert_model) + + train_examples = None + num_train_steps = None + if args.do_train: + train_examples = read_squad_examples( + input_file=args.train_file, is_training=True) + num_train_steps = int( + len(train_examples) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) + + # Prepare model + model = BertForQuestionAnswering.from_pretrained(args.bert_model, + cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank)) + + if args.fp16: + model.half() + model.to(device) + if args.local_rank != -1: + try: + from apex.parallel import DistributedDataParallel as DDP + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + model = DDP(model) + elif n_gpu > 1: + model = torch.nn.DataParallel(model) + + # Prepare optimizer + param_optimizer = list(model.named_parameters()) + + # hack to remove pooler, which is not used + # thus it produce None grad that break apex + param_optimizer = [n for n in param_optimizer if 'pooler' not in n[0]] + + no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] + optimizer_grouped_parameters = [ + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} + ] + + t_total = num_train_steps + if args.local_rank != -1: + t_total = t_total // torch.distributed.get_world_size() + if args.fp16: + try: + from apex.optimizers import FP16_Optimizer + from 
apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=t_total) + + global_step = 0 + if args.do_train: + cached_train_features_file = args.train_file+'_{0}_{1}_{2}_{3}'.format( + args.bert_model, str(args.max_seq_length), str(args.doc_stride), str(args.max_query_length)) + train_features = None + try: + with open(cached_train_features_file, "rb") as reader: + train_features = pickle.load(reader) + except: + train_features = convert_examples_to_features( + examples=train_examples, + tokenizer=tokenizer, + max_seq_length=args.max_seq_length, + doc_stride=args.doc_stride, + max_query_length=args.max_query_length, + is_training=True) + if args.local_rank == -1 or torch.distributed.get_rank() == 0: + logger.info(" Saving train features into cached file %s", cached_train_features_file) + with open(cached_train_features_file, "wb") as writer: + pickle.dump(train_features, writer) + logger.info("***** Running training *****") + logger.info(" Num orig examples = %d", len(train_examples)) + logger.info(" Num split examples = %d", len(train_features)) + logger.info(" Batch size = %d", args.train_batch_size) + logger.info(" Num steps = %d", num_train_steps) + all_input_ids = torch.tensor([f.input_ids for f in train_features], dtype=torch.long) + all_input_mask = torch.tensor([f.input_mask for f in train_features], dtype=torch.long) + all_segment_ids = torch.tensor([f.segment_ids for f in train_features], dtype=torch.long) + all_start_positions = torch.tensor([f.start_position for f in train_features], dtype=torch.long) + all_end_positions = torch.tensor([f.end_position for f in train_features], dtype=torch.long) + all_is_impossibles = torch.tensor([int(f.is_impossible) for f in train_features], dtype=torch.long) + train_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, + all_start_positions, all_end_positions, all_is_impossibles) + if args.local_rank == -1: + train_sampler = RandomSampler(train_data) + else: + train_sampler = DistributedSampler(train_data) + train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size) + + model.train() + for _ in trange(int(args.num_train_epochs), desc="Epoch"): + for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): + if n_gpu == 1: + batch = tuple(t.to(device) for t in batch) # multi-gpu does scattering it-self + input_ids, input_mask, segment_ids, start_positions, end_positions, _ = batch + loss = model(input_ids, segment_ids, input_mask, start_positions, end_positions) + if n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu. 
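# Editor's note (illustrative sketch, not part of the patch): a minimal plain-PyTorch version of
# the gradient-accumulation pattern used below (the toy model and names are stand-ins):
import torch
toy = torch.nn.Linear(4, 1)
opt = torch.optim.SGD(toy.parameters(), lr=0.1)
accum = 2                                        # plays the role of gradient_accumulation_steps
for step in range(4):
    loss = toy(torch.randn(8, 4)).pow(2).mean()
    (loss / accum).backward()                    # scale so accumulated grads match one large batch
    if (step + 1) % accum == 0:
        opt.step()                               # update only every `accum` mini-batches
        opt.zero_grad()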
+ if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() + if (step + 1) % args.gradient_accumulation_steps == 0: + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() + global_step += 1 + + # Save a trained model + model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + if args.do_train: + torch.save(model_to_save.state_dict(), output_model_file) + + # Load a trained model that you have fine-tuned + model_state_dict = torch.load(output_model_file) + model = BertForQuestionAnswering.from_pretrained(args.bert_model, state_dict=model_state_dict) + model.to(device) + + if args.do_predict and (args.local_rank == -1 or torch.distributed.get_rank() == 0): + eval_examples = read_squad_examples( + input_file=args.predict_file, is_training=False) + eval_features = convert_examples_to_features( + examples=eval_examples, + tokenizer=tokenizer, + max_seq_length=args.max_seq_length, + doc_stride=args.doc_stride, + max_query_length=args.max_query_length, + is_training=False) + + logger.info("***** Running predictions *****") + logger.info(" Num orig examples = %d", len(eval_examples)) + logger.info(" Num split examples = %d", len(eval_features)) + logger.info(" Batch size = %d", args.predict_batch_size) + + all_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long) + all_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long) + all_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long) + all_example_index = torch.arange(all_input_ids.size(0), dtype=torch.long) + eval_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_example_index) + # Run prediction for full data + eval_sampler = SequentialSampler(eval_data) + eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=args.predict_batch_size) + + model.eval() + all_results = [] + logger.info("Start evaluating") + for input_ids, input_mask, segment_ids, example_indices in tqdm(eval_dataloader, desc="Evaluating"): + if len(all_results) % 1000 == 0: + logger.info("Processing example: %d" % (len(all_results))) + input_ids = input_ids.to(device) + input_mask = input_mask.to(device) + segment_ids = segment_ids.to(device) + with torch.no_grad(): + batch_start_logits, batch_end_logits = model(input_ids, segment_ids, input_mask) + for i, example_index in enumerate(example_indices): + start_logits = batch_start_logits[i].detach().cpu().tolist() + end_logits = batch_end_logits[i].detach().cpu().tolist() + eval_feature = eval_features[example_index.item()] + unique_id = int(eval_feature.unique_id) + all_results.append(RawResult(unique_id=unique_id, + start_logits=start_logits, + end_logits=end_logits)) + output_prediction_file = os.path.join(args.output_dir, "predictions.json") + output_nbest_file = os.path.join(args.output_dir, "nbest_predictions.json") + output_null_log_odds_file = os.path.join(args.output_dir, "null_odds.json") + write_predictions(eval_examples, eval_features, all_results, + args.n_best_size, args.max_answer_length, + args.do_lower_case, output_prediction_file, + output_nbest_file, 
output_null_log_odds_file, args.verbose_logging, True, args.null_score_diff_threshold) + + +if __name__ == "__main__": + main() diff --git a/examples/run_swag.py b/examples/run_swag.py new file mode 100644 index 00000000000000..3fb87ae3e77882 --- /dev/null +++ b/examples/run_swag.py @@ -0,0 +1,536 @@ +# coding=utf-8 +# Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""BERT finetuning runner.""" + +import logging +import os +import argparse +import random +from tqdm import tqdm, trange +import csv + +import numpy as np +import torch +from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler +from torch.utils.data.distributed import DistributedSampler + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from pytorch_pretrained_bert.modeling import BertForMultipleChoice +from pytorch_pretrained_bert.optimization import BertAdam +from pytorch_pretrained_bert.file_utils import PYTORCH_PRETRAINED_BERT_CACHE + +logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt = '%m/%d/%Y %H:%M:%S', + level = logging.INFO) +logger = logging.getLogger(__name__) + + +class SwagExample(object): + """A single training/test example for the SWAG dataset.""" + def __init__(self, + swag_id, + context_sentence, + start_ending, + ending_0, + ending_1, + ending_2, + ending_3, + label = None): + self.swag_id = swag_id + self.context_sentence = context_sentence + self.start_ending = start_ending + self.endings = [ + ending_0, + ending_1, + ending_2, + ending_3, + ] + self.label = label + + def __str__(self): + return self.__repr__() + + def __repr__(self): + l = [ + f"swag_id: {self.swag_id}", + f"context_sentence: {self.context_sentence}", + f"start_ending: {self.start_ending}", + f"ending_0: {self.endings[0]}", + f"ending_1: {self.endings[1]}", + f"ending_2: {self.endings[2]}", + f"ending_3: {self.endings[3]}", + ] + + if self.label is not None: + l.append(f"label: {self.label}") + + return ", ".join(l) + + +class InputFeatures(object): + def __init__(self, + example_id, + choices_features, + label + + ): + self.example_id = example_id + self.choices_features = [ + { + 'input_ids': input_ids, + 'input_mask': input_mask, + 'segment_ids': segment_ids + } + for _, input_ids, input_mask, segment_ids in choices_features + ] + self.label = label + + +def read_swag_examples(input_file, is_training): + with open(input_file, 'r', encoding='utf-8') as f: + reader = csv.reader(f) + lines = list(reader) + + if is_training and lines[0][-1] != 'label': + raise ValueError( + "For training, the input file must contain a label column." + ) + + examples = [ + SwagExample( + swag_id = line[2], + context_sentence = line[4], + start_ending = line[5], # in the swag dataset, the + # common beginning of each + # choice is stored in "sent2". 
+ ending_0 = line[7], + ending_1 = line[8], + ending_2 = line[9], + ending_3 = line[10], + label = int(line[11]) if is_training else None + ) for line in lines[1:] # we skip the line with the column names + ] + + return examples + +def convert_examples_to_features(examples, tokenizer, max_seq_length, + is_training): + """Loads a data file into a list of `InputBatch`s.""" + + # Swag is a multiple choice task. To perform this task using Bert, + # we will use the formatting proposed in "Improving Language + # Understanding by Generative Pre-Training" and suggested by + # @jacobdevlin-google in this issue + # https://github.com/google-research/bert/issues/38. + # + # Each choice will correspond to a sample on which we run the + # inference. For a given Swag example, we will create the 4 + # following inputs: + # - [CLS] context [SEP] choice_1 [SEP] + # - [CLS] context [SEP] choice_2 [SEP] + # - [CLS] context [SEP] choice_3 [SEP] + # - [CLS] context [SEP] choice_4 [SEP] + # The model will output a single value for each input. To get the + # final decision of the model, we will run a softmax over these 4 + # outputs. + features = [] + for example_index, example in enumerate(examples): + context_tokens = tokenizer.tokenize(example.context_sentence) + start_ending_tokens = tokenizer.tokenize(example.start_ending) + + choices_features = [] + for ending_index, ending in enumerate(example.endings): + # We create a copy of the context tokens in order to be + # able to shrink it according to ending_tokens + context_tokens_choice = context_tokens[:] + ending_tokens = start_ending_tokens + tokenizer.tokenize(ending) + # Modifies `context_tokens_choice` and `ending_tokens` in + # place so that the total length is less than the + # specified length. Account for [CLS], [SEP], [SEP] with + # "- 3" + _truncate_seq_pair(context_tokens_choice, ending_tokens, max_seq_length - 3) + + tokens = ["[CLS]"] + context_tokens_choice + ["[SEP]"] + ending_tokens + ["[SEP]"] + segment_ids = [0] * (len(context_tokens_choice) + 2) + [1] * (len(ending_tokens) + 1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. + padding = [0] * (max_seq_length - len(input_ids)) + input_ids += padding + input_mask += padding + segment_ids += padding + + assert len(input_ids) == max_seq_length + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + + choices_features.append((tokens, input_ids, input_mask, segment_ids)) + + label = example.label + if example_index < 5: + logger.info("*** Example ***") + logger.info(f"swag_id: {example.swag_id}") + for choice_idx, (tokens, input_ids, input_mask, segment_ids) in enumerate(choices_features): + logger.info(f"choice: {choice_idx}") + logger.info(f"tokens: {' '.join(tokens)}") + logger.info(f"input_ids: {' '.join(map(str, input_ids))}") + logger.info(f"input_mask: {' '.join(map(str, input_mask))}") + logger.info(f"segment_ids: {' '.join(map(str, segment_ids))}") + if is_training: + logger.info(f"label: {label}") + + features.append( + InputFeatures( + example_id = example.swag_id, + choices_features = choices_features, + label = label + ) + ) + + return features + +def _truncate_seq_pair(tokens_a, tokens_b, max_length): + """Truncates a sequence pair in place to the maximum length.""" + + # This is a simple heuristic which will always truncate the longer sequence + # one token at a time. 
This makes more sense than truncating an equal percent + # of tokens from each, since if one sequence is very short then each token + # that's truncated likely contains more information than a longer sequence. + while True: + total_length = len(tokens_a) + len(tokens_b) + if total_length <= max_length: + break + if len(tokens_a) > len(tokens_b): + tokens_a.pop() + else: + tokens_b.pop() + +def accuracy(out, labels): + outputs = np.argmax(out, axis=1) + return np.sum(outputs == labels) + +def select_field(features, field): + return [ + [ + choice[field] + for choice in feature.choices_features + ] + for feature in features + ] + +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x + +def main(): + parser = argparse.ArgumentParser() + + ## Required parameters + parser.add_argument("--data_dir", + default=None, + type=str, + required=True, + help="The input data dir. Should contain the .csv files (or other data files) for the task.") + parser.add_argument("--bert_model", default=None, type=str, required=True, + help="Bert pre-trained model selected in the list: bert-base-uncased, " + "bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, " + "bert-base-multilingual-cased, bert-base-chinese.") + parser.add_argument("--output_dir", + default=None, + type=str, + required=True, + help="The output directory where the model checkpoints will be written.") + + ## Other parameters + parser.add_argument("--max_seq_length", + default=128, + type=int, + help="The maximum total input sequence length after WordPiece tokenization. \n" + "Sequences longer than this will be truncated, and sequences shorter \n" + "than this will be padded.") + parser.add_argument("--do_train", + action='store_true', + help="Whether to run training.") + parser.add_argument("--do_eval", + action='store_true', + help="Whether to run eval on the dev set.") + parser.add_argument("--do_lower_case", + action='store_true', + help="Set this flag if you are using an uncased model.") + parser.add_argument("--train_batch_size", + default=32, + type=int, + help="Total batch size for training.") + parser.add_argument("--eval_batch_size", + default=8, + type=int, + help="Total batch size for eval.") + parser.add_argument("--learning_rate", + default=5e-5, + type=float, + help="The initial learning rate for Adam.") + parser.add_argument("--num_train_epochs", + default=3.0, + type=float, + help="Total number of training epochs to perform.") + parser.add_argument("--warmup_proportion", + default=0.1, + type=float, + help="Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10%% of training.") + parser.add_argument("--no_cuda", + action='store_true', + help="Whether not to use CUDA when available") + parser.add_argument("--local_rank", + type=int, + default=-1, + help="local_rank for distributed training on gpus") + parser.add_argument('--seed', + type=int, + default=42, + help="random seed for initialization") + parser.add_argument('--gradient_accumulation_steps', + type=int, + default=1, + help="Number of updates steps to accumulate before performing a backward/update pass.") + parser.add_argument('--fp16', + action='store_true', + help="Whether to use 16-bit float precision instead of 32-bit") + parser.add_argument('--loss_scale', + type=float, default=0, + help="Loss scaling to improve fp16 numeric stability. 
Only used when fp16 set to True.\n" + "0 (default value): dynamic loss scaling.\n" + "Positive power of 2: static loss scaling value.\n") + + args = parser.parse_args() + + if args.local_rank == -1 or args.no_cuda: + device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") + n_gpu = torch.cuda.device_count() + else: + torch.cuda.set_device(args.local_rank) + device = torch.device("cuda", args.local_rank) + n_gpu = 1 + # Initializes the distributed backend which will take care of sychronizing nodes/GPUs + torch.distributed.init_process_group(backend='nccl') + logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format( + device, n_gpu, bool(args.local_rank != -1), args.fp16)) + + if args.gradient_accumulation_steps < 1: + raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format( + args.gradient_accumulation_steps)) + + args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps) + + random.seed(args.seed) + np.random.seed(args.seed) + torch.manual_seed(args.seed) + if n_gpu > 0: + torch.cuda.manual_seed_all(args.seed) + + if not args.do_train and not args.do_eval: + raise ValueError("At least one of `do_train` or `do_eval` must be True.") + + if os.path.exists(args.output_dir) and os.listdir(args.output_dir): + raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir)) + os.makedirs(args.output_dir, exist_ok=True) + + tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case) + + train_examples = None + num_train_steps = None + if args.do_train: + train_examples = read_swag_examples(os.path.join(args.data_dir, 'train.csv'), is_training = True) + num_train_steps = int( + len(train_examples) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) + + # Prepare model + model = BertForMultipleChoice.from_pretrained(args.bert_model, + cache_dir=PYTORCH_PRETRAINED_BERT_CACHE / 'distributed_{}'.format(args.local_rank), + num_choices=4) + if args.fp16: + model.half() + model.to(device) + if args.local_rank != -1: + try: + from apex.parallel import DistributedDataParallel as DDP + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + model = DDP(model) + elif n_gpu > 1: + model = torch.nn.DataParallel(model) + + # Prepare optimizer + param_optimizer = list(model.named_parameters()) + + # hack to remove pooler, which is not used + # thus it produce None grad that break apex + param_optimizer = [n for n in param_optimizer if 'pooler' not in n[0]] + + no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] + optimizer_grouped_parameters = [ + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} + ] + t_total = num_train_steps + if args.local_rank != -1: + t_total = t_total // torch.distributed.get_world_size() + if args.fp16: + try: + from apex.optimizers import FP16_Optimizer + from apex.optimizers import FusedAdam + except ImportError: + raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.") + + optimizer = FusedAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + bias_correction=False, + max_grad_norm=1.0) + if args.loss_scale == 0: + optimizer = 
FP16_Optimizer(optimizer, dynamic_loss_scale=True) + else: + optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale) + else: + optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=t_total) + + global_step = 0 + if args.do_train: + train_features = convert_examples_to_features( + train_examples, tokenizer, args.max_seq_length, True) + logger.info("***** Running training *****") + logger.info(" Num examples = %d", len(train_examples)) + logger.info(" Batch size = %d", args.train_batch_size) + logger.info(" Num steps = %d", num_train_steps) + all_input_ids = torch.tensor(select_field(train_features, 'input_ids'), dtype=torch.long) + all_input_mask = torch.tensor(select_field(train_features, 'input_mask'), dtype=torch.long) + all_segment_ids = torch.tensor(select_field(train_features, 'segment_ids'), dtype=torch.long) + all_label = torch.tensor([f.label for f in train_features], dtype=torch.long) + train_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label) + if args.local_rank == -1: + train_sampler = RandomSampler(train_data) + else: + train_sampler = DistributedSampler(train_data) + train_dataloader = DataLoader(train_data, sampler=train_sampler, batch_size=args.train_batch_size) + + model.train() + for _ in trange(int(args.num_train_epochs), desc="Epoch"): + tr_loss = 0 + nb_tr_examples, nb_tr_steps = 0, 0 + for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): + batch = tuple(t.to(device) for t in batch) + input_ids, input_mask, segment_ids, label_ids = batch + loss = model(input_ids, segment_ids, input_mask, label_ids) + if n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu. + if args.fp16 and args.loss_scale != 1.0: + # rescale loss for fp16 training + # see https://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html + loss = loss * args.loss_scale + if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + tr_loss += loss.item() + nb_tr_examples += input_ids.size(0) + nb_tr_steps += 1 + + if args.fp16: + optimizer.backward(loss) + else: + loss.backward() + if (step + 1) % args.gradient_accumulation_steps == 0: + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/t_total, args.warmup_proportion) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() + global_step += 1 + + # Save a trained model + model_to_save = model.module if hasattr(model, 'module') else model # Only save the model it-self + output_model_file = os.path.join(args.output_dir, "pytorch_model.bin") + torch.save(model_to_save.state_dict(), output_model_file) + + # Load a trained model that you have fine-tuned + model_state_dict = torch.load(output_model_file) + model = BertForMultipleChoice.from_pretrained(args.bert_model, + state_dict=model_state_dict, + num_choices=4) + model.to(device) + + if args.do_eval and (args.local_rank == -1 or torch.distributed.get_rank() == 0): + eval_examples = read_swag_examples(os.path.join(args.data_dir, 'val.csv'), is_training = True) + eval_features = convert_examples_to_features( + eval_examples, tokenizer, args.max_seq_length, True) + logger.info("***** Running evaluation *****") + logger.info(" Num examples = %d", len(eval_examples)) + logger.info(" Batch size = %d", args.eval_batch_size) + all_input_ids = torch.tensor(select_field(eval_features, 'input_ids'), 
dtype=torch.long) + all_input_mask = torch.tensor(select_field(eval_features, 'input_mask'), dtype=torch.long) + all_segment_ids = torch.tensor(select_field(eval_features, 'segment_ids'), dtype=torch.long) + all_label = torch.tensor([f.label for f in eval_features], dtype=torch.long) + eval_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label) + # Run prediction for full data + eval_sampler = SequentialSampler(eval_data) + eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=args.eval_batch_size) + + model.eval() + eval_loss, eval_accuracy = 0, 0 + nb_eval_steps, nb_eval_examples = 0, 0 + for input_ids, input_mask, segment_ids, label_ids in eval_dataloader: + input_ids = input_ids.to(device) + input_mask = input_mask.to(device) + segment_ids = segment_ids.to(device) + label_ids = label_ids.to(device) + + with torch.no_grad(): + tmp_eval_loss = model(input_ids, segment_ids, input_mask, label_ids) + logits = model(input_ids, segment_ids, input_mask) + + logits = logits.detach().cpu().numpy() + label_ids = label_ids.to('cpu').numpy() + tmp_eval_accuracy = accuracy(logits, label_ids) + + eval_loss += tmp_eval_loss.mean().item() + eval_accuracy += tmp_eval_accuracy + + nb_eval_examples += input_ids.size(0) + nb_eval_steps += 1 + + eval_loss = eval_loss / nb_eval_steps + eval_accuracy = eval_accuracy / nb_eval_examples + + result = {'eval_loss': eval_loss, + 'eval_accuracy': eval_accuracy, + 'global_step': global_step, + 'loss': tr_loss/nb_tr_steps} + + output_eval_file = os.path.join(args.output_dir, "eval_results.txt") + with open(output_eval_file, "w") as writer: + logger.info("***** Eval results *****") + for key in sorted(result.keys()): + logger.info(" %s = %s", key, str(result[key])) + writer.write("%s = %s\n" % (key, str(result[key]))) + + +if __name__ == "__main__": + main() diff --git "a/likunlin-\344\274\230\345\214\226-Copy1.ipynb" "b/likunlin-\344\274\230\345\214\226-Copy1.ipynb" new file mode 100644 index 00000000000000..5e459999f3fce0 --- /dev/null +++ "b/likunlin-\344\274\230\345\214\226-Copy1.ipynb" @@ -0,0 +1,1396 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "04/16/2019 09:11:27 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n", + "04/16/2019 09:11:27 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "04/16/2019 09:11:27 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')#do_lower_case:在标记化时将文本转换为小写。默认= True\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "13579\n", + "['i', 'ari', '##ve', 'home', '.']\n" + ] + } + ], + "source": [ + "print(tokenizer.vocab['doubts'])\n", + "print(tokenizer.tokenize(\"I arive home.\"))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "BertForPreTraining:\n", + "Outputs:\n", + " if `masked_lm_labels` and `next_sentence_label` are not `None`:\n", + " Outputs the total_loss which is the sum of the masked language modeling loss and the next\n", + " sentence classification loss.\n", + " if `masked_lm_labels` or `next_sentence_label` is `None`:\n", + " Outputs a tuple comprising\n", + " - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and\n", + " - the next sentence classification logits of shape [batch_size, 2]." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "from_pretrained:\n", + "Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict.\n", + "Download and cache the pre-trained model file if needed." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "scrolled": false + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "04/16/2019 09:34:51 - INFO - examples.extract_features - tokens: [CLS] i love you . hello everybody . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[103, 1045, 2293, 103, 1012, 7592, 103, 1012, 102]\n", + "[101, 103, 2293, 2017, 103, 7592, 7955, 103, 102]\n", + "[101, 1045, 103, 2017, 1012, 103, 7955, 1012, 103]\n" + ] + }, + { + "ename": "IndexError", + "evalue": "list index out of range", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 103\u001b[0m \u001b[0mmasked_feature_copies\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatches\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcopy_and_mask_feature\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfeatures\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 104\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 105\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmasked_feature_copies\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m#结果[101, 1045, 2293, 103, 102]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mIndexError\u001b[0m: list index out of range" + ] + } + ], + "source": [ + "import re\n", + "def convert_text_to_examples(text): #把每一行的句子变成一个实例,一个实例中包含text_a,text_b(text_b目前是没用的)\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line) #想要匹配这样的字符串'You are my sunshine. 
||| I love you.'\n",
+ " \n",
+ " if m is None:\n",
+ " text_a = line\n",
+ " else:\n",
+ " text_a = m.group(1) # the first matched sentence, e.g. You are my sunshine, my only sunshine.\n",
+ " text_b = m.group(2) # the second matched sentence, e.g. I love you.\n",
+ " \n",
+ " examples.append(\n",
+ " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n",
+ " unique_id += 1\n",
+ " return examples\n",
+ "# Note: when text is a bare string instead of a list, line iterates over single characters -> pass text in the form [\"***\"]\n",
+ "#print(convert_text_to_examples({\"I love you.\",\"hello everybody.\"})[0].text_a)\n",
+ "\n",
+ "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n",
+ " # turn each example into a feature\n",
+ " features = []\n",
+ " for (ex_index, example) in enumerate(examples):\n",
+ " tokens_a = tokenizer.tokenize(example.text_a) # tokenize text_a into WordPiece tokens\n",
+ " #print(example.unique_id) #*****************************\n",
+ " tokens_b = None\n",
+ " if example.text_b:\n",
+ " tokens_b = tokenizer.tokenize(example.text_b)\n",
+ "\n",
+ " tokens = []\n",
+ " input_type_ids = [] # segment embedding\n",
+ " if append_special_tokens: # defaults to True\n",
+ " tokens.append(\"[CLS]\")\n",
+ " input_type_ids.append(0)\n",
+ " for token in tokens_a:\n",
+ " if replace_mask and token == '_': # XD\n",
+ " token = \"[MASK]\"\n",
+ " tokens.append(token)\n",
+ " input_type_ids.append(0)\n",
+ " if append_special_tokens:\n",
+ " tokens.append(\"[SEP]\")\n",
+ " input_type_ids.append(0)\n",
+ "\n",
+ " if tokens_b:\n",
+ " for token in tokens_b:\n",
+ " if replace_mask and token == '_': # XD\n",
+ " token = \"[MASK]\"\n",
+ " tokens.append(token)\n",
+ " input_type_ids.append(1)\n",
+ " if append_special_tokens:\n",
+ " tokens.append(\"[SEP]\")\n",
+ " input_type_ids.append(1)\n",
+ " #print(tokens) #*******************************\n",
+ " input_ids = tokenizer.convert_tokens_to_ids(tokens) # map each token to its index in the vocabulary\n",
+ " input_mask = [1] * len(input_ids) \n",
+ " #print(input_ids)#***********************************\n",
+ " if ex_index < 5:\n",
+ "# logger.info(\"*** Example ***\")\n",
+ "# logger.info(\"unique_id: %s\" % (example.unique_id))\n",
+ " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n",
+ "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n",
+ "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n",
+ "# logger.info(\n",
+ "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n",
+ " \n",
+ " features.append(\n",
+ " InputFeatures(\n",
+ " unique_id=example.unique_id,\n",
+ " tokens=tokens,\n",
+ " input_ids=input_ids, # vocabulary index of every token in the string\n",
+ " input_mask=input_mask, # all ones\n",
+ " input_type_ids=input_type_ids)) # segment 0 vs segment 1 (text_a vs text_b); all zeros in this notebook\n",
+ " return features\n",
+ " \n",
+ "\n",
+ "\n",
+ "def copy_and_mask_feature(feature, step, masked_tokens=None): # step = distance between masked positions within one copy\n",
+ " import copy\n",
+ " tokens = feature.tokens\n",
+ " len_token = len(tokens)\n",
+ " if len_token < step:\n",
+ " batches = range(0, len_token)\n",
+ " else:\n",
+ " batches = range(0, step)\n",
+ " masked_feature_copies = []\n",
+ " for i in batches: # together the copies cover every position with [MASK]\n",
+ " feature_copy = copy.deepcopy(feature)\n",
+ " masked_pos = i\n",
+ " while masked_pos < len_token:\n",
+ " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n",
+ " masked_pos = masked_pos + step\n",
+ " masked_feature_copies.append(feature_copy)\n",
+ " return masked_feature_copies, batches\n",
+ "\n",
+ "#examples = convert_text_to_examples({\"I love you.Hello everybody.\"})\n",
+ "#features = convert_examples_to_features(examples, tokenizer, print_info=False)\n",
+
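"# With step=3 on the example above ([CLS] i love you . hello everybody . [SEP], 9 tokens),\n",
+ "# copy_and_mask_feature returns 3 copies, masking positions {0,3,6}, {1,4,7} and {2,5,8} respectively:\n",
+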
"#masked_feature_copies, batches = copy_and_mask_feature(features[0],3)\n", + "#for i in range(0,5):\n", + "# print(masked_feature_copies[i].input_ids) #结果[101, 1045, 2293, 103, 102]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "\n", + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): #输出结果的函数,要最高概率topk个输出\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item() #这个probs是该字符串第i个位置上填上词典上各个词的概率,prob_是词典上原来天的这个词的概率\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " #print(values, indices)\n", + " #print(\"****************************************************************************************************\")\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret #返回的这是个啥" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 'yellow_1'\n", + " elif gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " \n", + " avg_gap = 0.\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " gap = math.log(top_prob) - math.log(prob_) #计算两个词之间的差距\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " print_token(tokens[i], suggestion, gap)\n", + " avg_gap += gap\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print('平均gap:'+ str(avg_gap))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['[CLS]', 'last', 'week', 'i', 'went', 'to', 'the', 'theatre', 
'.', 'i', 'had', 'a', 'very', 'good', 'seat', '.', 'the', 'play', 'was', 'very', 'interesting', '.', '[SEP]']\n", + "[[101, 2197, 2733, 1045, 2253, 2000, 1996, 3004, 1012, 102], [101, 1045, 2018, 1037, 2200, 2204, 2835, 1012, 102], [101, 1996, 2377, 2001, 2200, 5875, 1012, 102]]\n", + "[[0, 0], [0, 1], [0, 2], [0, 3], [0, 4], [0, 5], [0, 6], [0, 7], [0, 8], [1, 1], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 1], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6]]\n", + "[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0]]\n", + "['Last week I went to the theatre.', ' I had a very good seat.', ' The play was very interesting.']\n" + ] + } + ], + "source": [ + "analyzed_cache = {}\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "#print (lemma('gave'))\n", + "#print (lexeme('production'))\n", + "#print (conjugate(verb='give',tense=PRESENT,number=SG))\n", + "def process_text(text): \n", + "#处理输入文本,包括将文本按句子分成若干token,得出原来text中index位置的单词在x句子的y位置,还得出各个句子类别码\n", + " token =[]\n", + " token0 = tokenizer.tokenize(text)\n", + " token.append('[CLS]')\n", + " for i in token0:\n", + " token.append(i)\n", + " token.append('[SEP]')\n", + " print(token)\n", + " in_sentence = [[0,0]] \n", + " sentence_n = 0\n", + " index = 1\n", + " for i in range(1,len(token)-1):\n", + " in_sentence.append([sentence_n,index]) #每个token中的词在所在句中的位置表示出来,以及该位置在哪一句中\n", + " index = index + 1 #比如,位置i这个词在第sentence句的index位置上\n", + " if token[i] == '.':\n", + " sentence_n = sentence_n + 1\n", + " index = 1\n", + " sentences = text.split(\".\")\n", + " sentences.remove('')\n", + "\n", + " sen_token = []\n", + " input_ids_sen = []\n", + " input_type_ids_sen = []\n", + " for i,sentence in enumerate(sentences):\n", + " sentence = sentence + '.'\n", + " sentences[i] = sentences[i] + '.'\n", + " token = []\n", + " input_type_ids = []\n", + " tokens = tokenizer.tokenize(sentence)\n", + " token.append('[CLS]')\n", + " input_type_ids.append(0) \n", + " for i in tokens:\n", + " token.append(i)\n", + " input_type_ids.append(0) \n", + " token.append('[SEP]') \n", + " input_type_ids.append(0)\n", + " input_ids_sen.append(tokenizer.convert_tokens_to_ids(token))\n", + " input_type_ids_sen.append(input_type_ids)\n", + " #input_ids_sen = torch.tensor(input_ids_sen)\n", + " #input_type_ids_sen = torch.tensor(input_type_ids_sen)\n", + " return input_ids_sen,input_type_ids_sen,in_sentence,sentences\n", + "text = \"Last week I went to the theatre. I had a very good seat. 
The play was very interesting.\"\n", + "input_ids_sen,input_type_ids_sen,in_sentence,sentences = process_text(text)\n", + "print(input_ids_sen)\n", + "print(in_sentence)\n", + "print(input_type_ids_sen)\n", + "print(sentences)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "这个函数是在该位置上的单词可能性很低时才使用,不会把原来就较为合理的面目全非" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "是否用不定式:\n", + "用to的可能性0.00036304089007899165\n", + "可能性最大的词概率0.23709583282470703\n", + "是否用被动或进行时:\n", + "[('was', 0.0002793713065329939), ('am', 4.049863855470903e-05), ('were', 1.306664398725843e-05), ('been', 4.840642304770881e-06), ('be', 1.453689151276194e-06), ('are', 7.996850399649702e-07), ('is', 5.958298174846277e-07), ('being', 9.706550230248467e-09)]\n", + "had 0.8573063611984253\n", + "was 0.0002793713065329939\n", + "不是被动\n", + "[('was', 0.9590925574302673), ('am', 0.006898669525980949), ('were', 0.0016424404457211494), ('been', 0.0004373548727016896), ('is', 0.00035717932041734457), ('be', 3.4134478482883424e-05), ('are', 2.2988733689999208e-05), ('being', 3.1775894626662193e-07)]\n", + "was 0.9590925574302673\n", + "was 0.9590925574302673\n", + "判断其他语法:\n", + "need_be == 1\n", + "['go', 'goes', 'going', 'went', 'gone']\n", + "[2175, 3632, 2183, 2253, 2908]\n", + "{'go': 0.00043932811240665615, 'goes': 0.00012179886834928766, 'going': 0.6597349047660828, 'went': 0.00122930109500885, 'gone': 0.002755501540377736}\n", + "going\n", + "was going\n" + ] + }, + { + "data": { + "text/plain": [ + "'was going'" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import copy\n", + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "def analyse_V(index):\n", + "#这是一个处理动词语法问题的函数,输入为问题词在text的token中的下标index\n", + "\n", + "#******************************************初始数据处理**************************************************************************\n", + " need_to = 0 #表示是否需要变为不定式形式,0表示不需要,1表示需要\n", + " need_be = 0 #表示是否需要变为被动语态0表示不需要,1表示需要\n", + " \n", + " sentence_id = in_sentence[index][0]\n", + " id_in_sen = in_sentence[index][1]\n", + " wordV = input_ids_sen[sentence_id][id_in_sen]\n", + " wordV = tokenizer.ids_to_tokens[wordV]\n", + " \n", + " input_ids = copy.deepcopy(input_ids_sen[sentence_id])\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "#*****************************************判断语法应不应该是不定式抑或是被动语态**************************************************************\n", + " '''\n", + " input_ids1 = copy.deepcopy(input_ids)\n", + " input_ids1.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids1 = copy.deepcopy(input_type_ids)\n", + " input_type_ids1.append(0)\n", + " \n", + " T_input_ids1 = torch.tensor([input_ids1], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_ids1 = T_input_ids1.to(device) #拿去GPU\n", + "\n", + " T_input_type_ids1 = torch.tensor([input_type_ids1], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_type_ids1 = T_input_type_ids1.to(device) \n", + " \n", + " mlm_logits1, _ = model(T_input_ids1, T_input_type_ids1)\n", + " mlm_probs1 = F.softmax(mlm_logits1, dim=-1)\n", + " reduced_mlm_probs1 = mlm_probs1[0][id_in_sen]\n", + " '''\n", + "#**************************************判断是不是不定式********************* \n", + " input_ids1 = 
copy.deepcopy(input_ids)\n", + " input_ids1.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_ids1[id_in_sen + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,person = 1)]\n", + " input_type_ids1 = copy.deepcopy(input_type_ids)\n", + " input_type_ids1.append(0)\n", + " \n", + " T_input_ids1 = torch.tensor([input_ids1], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_ids1 = T_input_ids1.to(device) #拿去GPU\n", + "\n", + " T_input_type_ids1 = torch.tensor([input_type_ids1], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_type_ids1 = T_input_type_ids1.to(device) \n", + " \n", + " mlm_logits1, _ = model(T_input_ids1, T_input_type_ids1)\n", + " mlm_probs1 = F.softmax(mlm_logits1, dim=-1)\n", + " reduced_mlm_probs1 = mlm_probs1[0][id_in_sen]\n", + " \n", + " prob_to = float(reduced_mlm_probs1[tokenizer.vocab[\"to\"]])\n", + " top_prob1 = reduced_mlm_probs1.max().item()\n", + " print(\"是否用不定式:\")\n", + " print(\"用to的可能性\"+str(prob_to))\n", + " print(\"可能性最大的词概率\"+str(top_prob1))\n", + " gap1 = math.log(top_prob1) - math.log(prob_to)\n", + " if gap1 < 1:\n", + " need_to = 1 \n", + "#**************************************判断是不是被动语态或者进行时******************* \n", + " print(\"是否用被动或进行时:\")\n", + " input_ids3 = copy.deepcopy(input_ids)\n", + " input_ids3.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_ids3_ = copy.deepcopy(input_ids3)\n", + " input_ids3[id_in_sen + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PAST,aspect=PROGRESSIVE)]\n", + " input_ids3_[id_in_sen + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " input_type_ids3 = copy.deepcopy(input_type_ids)\n", + " input_type_ids3.append(0)\n", + " \n", + " T_input_ids3 = torch.tensor([input_ids3], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_ids3 = T_input_ids3.to(device) #拿去GPU\n", + " T_input_ids3_ = torch.tensor([input_ids3_], dtype=torch.long)\n", + " T_input_ids3_ = T_input_ids3_.to(device)\n", + "\n", + " T_input_type_ids3 = torch.tensor([input_type_ids3], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_type_ids3 = T_input_type_ids3.to(device) \n", + " \n", + " mlm_logits3, _ = model(T_input_ids3, T_input_type_ids3)\n", + " mlm_logits3_,_ = model(T_input_ids3_, T_input_type_ids3)\n", + " mlm_probs3 = F.softmax(mlm_logits3, dim=-1)\n", + " reduced_mlm_probs3 = mlm_probs3[0][id_in_sen]\n", + " mlm_probs3_= F.softmax(mlm_logits3_, dim=-1)\n", + " reduced_mlm_probs3_ = mlm_probs3_[0][id_in_sen]\n", + " \n", + " list_be = lexeme('be')\n", + " list_be = lexeme('be')[:8]\n", + "\n", + " list_be_id = tokenizer.convert_tokens_to_ids(list_be)\n", + " list_be_prob = {}\n", + " for word,word_id in zip(list_be,list_be_id):\n", + " list_be_prob.update({word:float(reduced_mlm_probs3[word_id].data)})\n", + " prob_ord3 = sorted(list_be_prob.items(),key = lambda x:x[1],reverse = True)\n", + " print(prob_ord3)\n", + " top_ind3 = reduced_mlm_probs3.argmax().item()\n", + " top_prob3 = reduced_mlm_probs3.max().item()\n", + " print(tokenizer.ids_to_tokens[top_ind3],top_prob3)\n", + " print(prob_ord3[0][0],prob_ord3[0][1])\n", + " top_prob_be = prob_ord3[0][1]\n", + " gap3 = math.log(top_prob3) - math.log(top_prob_be)\n", + " if gap3 < 1:\n", + " need_be = 1 \n", + " be_ = prob_ord3[0][0]\n", + " else:\n", + " print('不是被动')\n", + "#*******************************************是不是现在分词******************************** \n", + " list_be_prob = {}\n", + " for word,word_id in zip(list_be,list_be_id):\n", + " 
list_be_prob.update({word:float(reduced_mlm_probs3_[word_id].data)})\n", + " prob_ord3 = sorted(list_be_prob.items(),key = lambda x:x[1],reverse = True)\n", + " print(prob_ord3)\n", + " top_ind3 = reduced_mlm_probs3_.argmax().item()\n", + " top_prob3 = reduced_mlm_probs3_.max().item()\n", + " print(tokenizer.ids_to_tokens[top_ind3],top_prob3)\n", + " print(prob_ord3[0][0],prob_ord3[0][1])\n", + " top_prob_be = prob_ord3[0][1]\n", + " gap3 = math.log(top_prob3) - math.log(top_prob_be)\n", + " if gap3 < 1:\n", + " need_be = 1 \n", + " be_ = prob_ord3[0][0] \n", + "#*************************************************判断其他语法******************************************************************\n", + " print(\"判断其他语法:\")\n", + " if need_to == 0 and need_be == 0:\n", + " input_ids[id_in_sen] = tokenizer.vocab[\"[MASK]\"]\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "\n", + " T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_ids = T_input_ids.to(device) #拿去GPU\n", + " T_input_type_ids = T_input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(T_input_ids, T_input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + " reduced_mlm_probs = mlm_probs[0][id_in_sen]\n", + "\n", + " list_word = lexeme(wordV)\n", + " #list_word = [word]\n", + "\n", + " list_word_id = tokenizer.convert_tokens_to_ids(list_word)\n", + " print(list_word)\n", + " print(list_word_id) \n", + " list_word_prob = {}\n", + " for word,word_id in zip(list_word,list_word_id):\n", + " list_word_prob.update({word:float(reduced_mlm_probs[word_id].data)})\n", + " print(list_word_prob)\n", + " prob_ord = sorted(list_word_prob.items(),key = lambda x:x[1],reverse = True)\n", + "\n", + " top_ind = reduced_mlm_probs.argmax().item()\n", + " top_prob = reduced_mlm_probs.max().item()\n", + " top_prob_thisV = prob_ord[0][1]\n", + " gap = math.log(top_prob) - math.log(top_prob_thisV)\n", + " \n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " sentence = copy.deepcopy(sentences[sentence_id])\n", + " sentence = tokenizer.tokenize(sentence)\n", + " sentence[id_in_sen - 1] = suggestion\n", + " sentence_tag = nltk.pos_tag(sentence)\n", + " \n", + " suggestion_tag = sentence_tag[id_in_sen - 1][1]\n", + " #print(sentence_tag[id_in_sen - 1][0])\n", + " print(suggestion_tag)\n", + " \n", + " if gap < 5 or suggestion_tag.find(\"V\")==-1:\n", + " suggestion = prob_ord[0][0]\n", + " \n", + " \n", + "\n", + " \"\"\"”values, indices = reduced_mlm_probs.topk(topk)\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " \n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print(token,prob)\"\"\"\n", + " elif need_to == 1:\n", + " input_ids2 = copy.deepcopy(input_ids)\n", + " input_ids2.insert(id_in_sen,tokenizer.vocab[\"to\"])\n", + " input_ids2[id_in_sen + 1] = tokenizer.vocab[\"[MASK]\"]\n", + " T_input_ids2 = torch.tensor([input_ids2], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_ids2 = T_input_ids2.to(device) #拿去GPU\n", + " \n", + " input_type_ids2 = copy.deepcopy(input_type_ids1)\n", + " T_input_type_ids2 = torch.tensor([input_type_ids2], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_type_ids2 = T_input_type_ids2.to(device) \n", + " mlm_logits2, _ = model(T_input_ids2, T_input_type_ids2)\n", + " mlm_probs2 = F.softmax(mlm_logits2, dim=-1)\n", + " reduced_mlm_probs2 = 
mlm_probs2[0][id_in_sen + 1]\n", + " \n", + " thisV = conjugate(verb = wordV,tense=PRESENT,person = 1)\n", + " print(thisV)\n", + " #list_word = [wordV]\n", + " thisV_id = tokenizer.vocab[thisV]\n", + " \n", + " top_ind2 = reduced_mlm_probs2.argmax().item()\n", + " top_prob2 = reduced_mlm_probs2.max().item()\n", + " prob_thisV2 = reduced_mlm_probs2[thisV_id]\n", + " gap = math.log(top_prob2) - math.log(prob_thisV2)\n", + " \n", + " suggestion = tokenizer.ids_to_tokens[top_ind2]\n", + " sentence = copy.deepcopy(sentences[sentence_id])\n", + " sentence = tokenizer.tokenize(sentence)\n", + " sentence.insert(id_in_sen - 1,'to')\n", + " sentence[id_in_sen] = suggestion\n", + " print(\"sentence是:\",sentence)\n", + " sentence_tag = nltk.pos_tag(sentence)\n", + " \n", + " suggestion_tag = sentence_tag[id_in_sen][1]\n", + " if gap < 5 or suggestion_tag.find(\"V\")== -1:\n", + " suggestion = 'to '+ thisV\n", + " else:\n", + " suggestion = 'to '+ tokenizer.ids_to_tokens[top_ind2]\n", + " elif need_be == 1:#********************************处理需要be动词的时态*****************\n", + " print(\"need_be == 1\")\n", + " input_ids3 = copy.deepcopy(input_ids1)\n", + " input_ids3[id_in_sen] = tokenizer.vocab[be_]\n", + " input_ids3[id_in_sen + 1] = tokenizer.vocab[\"[MASK]\"]\n", + " T_input_ids3 = torch.tensor([input_ids3], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_ids3 = T_input_ids3.to(device) #拿去GPU\n", + " \n", + " input_type_ids3 = copy.deepcopy(input_type_ids1)\n", + " T_input_type_ids3 = torch.tensor([input_type_ids3], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_type_ids3 = T_input_type_ids3.to(device)\n", + " mlm_logits3, _ = model(T_input_ids3, T_input_type_ids3)\n", + " mlm_probs3 = F.softmax(mlm_logits3, dim=-1)\n", + " reduced_mlm_probs3 = mlm_probs3[0][id_in_sen + 1]\n", + " \n", + " list_word3 = lexeme(wordV)\n", + " #list_word = [wordV]\n", + " list_word_id3 = tokenizer.convert_tokens_to_ids(list_word3)\n", + " print(list_word3)\n", + " print(list_word_id3) \n", + " list_word_prob3 = {}\n", + " for word,word_id in zip(list_word3,list_word_id3):\n", + " list_word_prob3.update({word:float(reduced_mlm_probs3[word_id].data)})\n", + " print(list_word_prob3)\n", + " prob_ord3 = sorted(list_word_prob3.items(),key = lambda x:x[1],reverse = True)\n", + "\n", + " top_ind3 = reduced_mlm_probs3.argmax().item()\n", + " top_prob3 = reduced_mlm_probs3.max().item()\n", + " top_prob_thisV3 = prob_ord3[0][1]\n", + " gap = math.log(top_prob3) - math.log(top_prob_thisV3)\n", + " print(tokenizer.ids_to_tokens[top_ind3])\n", + " \n", + " suggestion = tokenizer.ids_to_tokens[top_ind3]\n", + " sentence = copy.deepcopy(sentences[sentence_id])\n", + " sentence = tokenizer.tokenize(sentence)\n", + " sentence.insert(id_in_sen -1,be_)\n", + " sentence[id_in_sen] = suggestion\n", + " #print(\"sentence是:\",sentence)\n", + " sentence_tag = nltk.pos_tag(sentence)\n", + " \n", + " suggestion_tag = sentence_tag[id_in_sen][1]\n", + " if gap < 5 or suggestion_tag.find(\"VB\")== -1:\n", + " suggestion = be_ + ' ' + prob_ord3[0][0]\n", + " else:\n", + " suggestion = be_ + ' ' + tokenizer.ids_to_tokens[top_ind3]\n", + " print(suggestion)\n", + " return suggestion\n", + " \n", + "analyse_V(4)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "ename": "IndentationError", + "evalue": "unexpected indent (, line 49)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m File \u001b[0;32m\"\"\u001b[0;36m, line \u001b[0;32m49\u001b[0m\n\u001b[0;31m 
T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mIndentationError\u001b[0m\u001b[0;31m:\u001b[0m unexpected indent\n" + ] + } + ], + "source": [ + "from pattern.en import article,referenced,pluralize, singularize\n", + "def analyse_N(index):\n", + "#******************************************初始数据处理**************************************************************************\n", + " need_DT = 0 #表示是否需要在前面加冠词\n", + " prob_N = 0 #表示这个名词的单复数中最高的概率 \n", + " sentence_id = in_sentence[index][0]\n", + " id_in_sen = in_sentence[index][1]\n", + " wordN = input_ids_sen[sentence_id][id_in_sen]\n", + " wordN = tokenizer.ids_to_tokens[wordN]\n", + " \n", + " input_ids = copy.deepcopy(input_ids_sen[sentence_id])\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "#*****************************************若一个词有问题************************************************************************* \n", + " input_ids[id_in_sen] = tokenizer.vocab[\"[MASK]\"]\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "\n", + " T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_ids = T_input_ids.to(device) #拿去GPU\n", + " T_input_type_ids = T_input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(T_input_ids, T_input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + " reduced_mlm_probs = mlm_probs[0][id_in_sen]\n", + " \n", + " N_ = singularize(wordN)\n", + " N_s= pluralize(wordN)\n", + " N_id = tokenizer.vocab[N_]\n", + " N_s_id = tokenizer.vocab[N_s]\n", + " if(reduced_mlm_probs[N_id] > reduced_mlm_probs[N_s_id]):\n", + " suggestion = N_\n", + " prob_N = reduced_mlm_probs[N_id]\n", + " else:\n", + " suggestion = N_s\n", + " prob_N = reduced_mlm_probs[N_s_id]\n", + " \n", + " top_ind = reduced_mlm_probs.argmax().item()\n", + " top_prob = reduced_mlm_probs.max().item()\n", + " \n", + " gap = math.log(top_prob)- math.log(prob_N)\n", + " if gap > 6.5: #我觉得代词的阈值应该回比名词小一点\n", + " need_DT = 1 #不见棺材不落泪,认为缺冠词 \n", + " \n", + " \n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_ids.insert[id_in_sen + 1] = tokenizer.vocab[suggestion]\n", + " input_type_ids.append(0)\n", + " \n", + " T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_ids = T_input_ids.to(device) #拿去GPU\n", + " T_input_type_ids = T_input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(T_input_ids, T_input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + " reduced_mlm_probs = mlm_probs[0][id_in_sen]" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=20):\n", + " step = 7\n", + " if text[0] in analyzed_cache: #分析过的缓存\n", + " features, mlm_probs = analyzed_cache[text[0]]\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " tokens = features[0].tokens \n", + " else:\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens 
is not None:\n", + " assert len(features) == 1\n", + " features, batches = copy_and_mask_feature(features[0],step, masked_tokens=masked_tokens)\n", + " #print(len(features))\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) #把input_ids增加了一个维度,变成[n_features,sequence_len]\n", + " #这里的n_features实际上是句子有多少批训练\n", + " \n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " input_ids = input_ids.to(device) #拿去GPU\n", + " input_type_ids = input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1) #最后一维,也就是vocab 换算成概率和为百分之百\n", + " #print(mlm_probs.size())#这里实验的是torch.Size([5, 5, 30522])\n", + " tokens = features[0].tokens #为了输出,[mask]在input_ids里面表示出来,features的token都一样\n", + " #print(tokens)\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size() #三个维度分别是batch_size, sequence_length, vocab_size\n", + " assert bsz == len(batches)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(tokens), vocab_size)\n", + " for i in batches:\n", + " pos = i\n", + " while pos < len(tokens):\n", + " reduced_mlm_probs[0, pos] = mlm_probs[i, pos]\n", + " pos = pos + step\n", + " mlm_probs = reduced_mlm_probs #压缩一下大小,节约不必要浪费的空间(只需要第i个batch里面[mask]位置的词汇表概率即可)\n", + " #tokens = [tokens[i] for i in masked_positions]\n", + " \n", + " analyzed_cache[text[0]] = (features, mlm_probs)\n", + " \n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs) #传入的probs是二维的\n", + " #print(top_pairs) #******************************\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " #return top_pairs\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'analyze_text' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mtext\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"I hate you.\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0;31m#text =[\"Last week I go to the zoo. I had a very good seat. The play was very interesting.But I didn't enjoy it. A young man and a young woman were sitting behind me.They were talking loudly. 
I got very angry.\"]#因为外面有中括号,所以是二维的\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 9\u001b[0;31m \u001b[0manalyze_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtext\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_firstk_probs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 10\u001b[0m \u001b[0;31m#print(analyzed_cache)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0mtime_end\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'analyze_text' is not defined" + ] + } + ], + "source": [ + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "# text = [\"Last week I went to the theatre. I had a very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "# text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "import time\n", + "time_start=time.time()\n", + "text = [\"I hate you.\"]\n", + "#text =[\"Last week I go to the zoo. I had a very good seat. The play was very interesting.But I didn't enjoy it. A young man and a young woman were sitting behind me.They were talking loudly. I got very angry.\"]#因为外面有中括号,所以是二维的\n", + "analyze_text(text, show_firstk_probs=100)\n", + "#print(analyzed_cache)\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s')" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "01/03/2019 17:10:45 - INFO - examples.extract_features - tokens: [CLS] the trophy doesn ' t fit into the brown suitcase because the [MASK] is too large . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 2 | . 
1 | ) 1 | the 1 | , 1 | \" \n", + " 100 | the \t*100 | the 0 | his 0 | a 0 | its 0 | her \n", + " 97 | trophy \t* 97 | trophy 0 | cup 0 | prize 0 | trophies 0 | competition \n", + " 100 | doesn \t*100 | doesn 0 | can 0 | does 0 | won 0 | didn \n", + " 100 | ' \t*100 | ' 0 | t 0 | \" 0 | = 0 | ` \n", + " 100 | t \t*100 | t 0 | not 0 | s 0 | n 0 | to \n", + " 100 | fit \t*100 | fit 0 | fits 0 | sit 0 | get 0 | fitting \n", + " 100 | into \t*100 | into 0 | in 0 | inside 0 | onto 0 | within \n", + " 100 | the \t*100 | the 0 | her 0 | his 0 | a 0 | my \n", + " 100 | brown \t*100 | brown 0 | black 0 | green 0 | blue 0 | plastic \n", + " 95 | suitcase \t* 95 | suitcase 3 | bag 1 | luggage 0 | backpack 0 | trunk \n", + " 100 | because \t*100 | because 0 | as 0 | since 0 | due 0 | . \n", + " 100 | the \t*100 | the 0 | its 0 | his 0 | it 0 | her \n", + " 0 | [MASK] \t 21 | suitcase 19 | bag 6 | box 2 | luggage 2 | case \n", + " 99 | is \t* 99 | is 1 | was 0 | being 0 | has 0 | it \n", + " 100 | too \t*100 | too 0 | very 0 | extra 0 | overly 0 | more \n", + " 87 | large \t* 87 | large 11 | big 1 | small 1 | huge 0 | larger \n", + " 100 | . \t*100 | . 0 | ; 0 | , 0 | ! 0 | ' \n", + " 0 | [SEP] \t 35 | . 8 | ) 5 | , 4 | ( 3 | it \n" + ] + } + ], + "source": [ + "text = [\"The trophy doesn't fit into the brown suitcase because the _ is too large.\"]\n", + "# text = [\"Mary beat John in the match because _ was very strong.\"]\n", + "features = convert_examples_to_features(convert_text_to_examples(text), tokenizer, print_info=False)\n", + "input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long).to(device)\n", + "input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long).to(device)\n", + "mlm_logits, _ = model(input_ids, input_type_ids)\n", + "mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "tokens = features[0].tokens\n", + "top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=100)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.',\n", + " 'Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.']" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. _ left then _ left.\",\n", + " \"John came after Mary. They left in reverse order. 
_ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare\n", + " \"Though John is tall, Tom is taller than John. So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . \",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. _ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "config" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + " if a not in tokenizer.vocab:\n", + " ce\n", + " print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(2, 'fit into:large/small', False),\n", + " (4, 'thank:receive/give', False),\n", + " (6, 'call:successful available', True),\n", + " (8, 'ask:repeat answer', False),\n", + " (10, 'zoom by:fast/slow', False),\n", + " (12, 'vindicated/crushed:be the winner', False),\n", + " (14, 'lift:weak heavy', False),\n", + " (16, 'crash through:[hard]/[soft]', False),\n", + 
" (18, '[block]:short/tall', False),\n", + " (20, 'down to:top/bottom', False),\n", + " (22, 'beat:good/bad', False),\n", + " (24, 'roll off:anchored level', False),\n", + " (26, 'above/below', False),\n", + " (28, 'better/worse:study hard', False),\n", + " (30, 'after/before:far away', False),\n", + " (32, 'be upset with:buy from not work/sell not work', True),\n", + " (34, '?yell at comfort:upset', False),\n", + " (36, 'above/below:moved first', False),\n", + " (38, 'although/because', False),\n", + " (40, 'bully:punish rescue', False),\n", + " (42, 'pour:empty/full', False),\n", + " (44, 'know:nosy indiscreet', False),\n", + " (46, 'explain:convince/understand', True),\n", + " (48, '?know tell:so/because', True),\n", + " (50, 'beat:younger/older', False),\n", + " (56, 'clog:cleaned removed', True),\n", + " (58, '?immediately follow:short delayed', False),\n", + " (60, '?between:see see around', True),\n", + " (64, 'but/and', False),\n", + " (66, 'clean:put in the trash put in the drawer', False),\n", + " (68, 'because/but', False),\n", + " (70, 'out of:handy lighter', False),\n", + " (72, 'put:tall high', False),\n", + " (74, 'show:good famous', True),\n", + " (76, 'pay for:generous grateful', False),\n", + " (78, 'but', False),\n", + " (80, 'if', False),\n", + " (82, 'if', False),\n", + " (84, 'fool:get/lose', False),\n", + " (88, 'wait:impatient cautious', False),\n", + " (90, 'give birth:woman baby', True),\n", + " (92, '?stop normal/stop abnormal:strange', False),\n", + " (96, 'eat:hungry tasty', False),\n", + " (98, 'put ... into filled with ... :get in/get out', False),\n", + " (100, 'up:at the bottom/at the top', False),\n", + " (102, 'crash through:removed repaired', False),\n", + " (104, 'stab:taken to the police station taken to the hospital', False),\n", + " (106, 'hear ... humming and whistling:annoyed/annoying', True),\n", + " (108, 'see ... 
juggling watermelons:impressed/impressive', True),\n", + " (114, 'tell lies: truthful skeptical', True),\n", + " (130, 'but:disappointed', True),\n", + " (132, 'visit:invite come out/invite come in', True),\n", + " (134, 'take classes from:eager known to speak it fluently', False),\n", + " (138, 'cover:out gone', True),\n", + " (144, 'tuck:work sleep', True),\n", + " (150, 'influence:later/earlier', False),\n", + " (152, 'can not cut:thick small', False),\n", + " (154, 'attack:kill guard', False),\n", + " (156, 'attack:bold nervous', False),\n", + " (160, 'change:hard:easy', False),\n", + " (166, 'alive:is/was', False),\n", + " (168, 'infant:twelve years old twelve months old', False),\n", + " (170, 'better equipped and large:defeated/victorious', False),\n", + " (178, 'interview:persistent cooperative', False),\n", + " (186, 'be full of:minority/majority', False),\n", + " (188, 'like over:more/fewer', False),\n", + " (190, 'place on all:not enough/too many', True),\n", + " (192, 'stick:leave have', True),\n", + " (196, 'follow:admire/influence', True),\n", + " (198, 'fit through:wide/narrow', False),\n", + " (200, 'trade:dowdy/great', False),\n", + " (202, 'hire/hire oneself to:take care of', True),\n", + " (204, 'promise/order', False),\n", + " (208, 'mother:education place', True),\n", + " (210, 'knock:get an answer/answer', True),\n", + " (212, 'pay:receive/deliver', False),\n", + " (218, '?', False),\n", + " (220, 'say check:move take', False),\n", + " (222, '?', False),\n", + " (224, 'give a life:drive alone walk', False),\n", + " (226, 'pass the plate:full/hungry', False),\n", + " (228, 'pass:turn over turn next', False),\n", + " (232, 'stretch pat', True),\n", + " (234, 'accept share', False),\n", + " (236, 'speak:break silence break concentration', False),\n", + " (240, 'carry:leg ache leg dangle', True),\n", + " (242, 'carry:in arms in bassinet', False),\n", + " (244, 'hold:against chest against will', True),\n", + " (250, 'stop', False),\n", + " (252, 'even though/because/not', False),\n", + " (255, 'give:not hungry/hungry', False),\n", + " (259, 'ask for a favor:refuse/be refused`', False),\n", + " (261, 'cede:less popular/more popular', False),\n", + " (263, 'not pass although:see open/open', True),\n", + " (271, 'suspect regret', True)]" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "[(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in e['sentence'] 
for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'BertSelfAttention' object has no attribute 'attention_probs'", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__getattr__\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m 516\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 517\u001b[0m raise AttributeError(\"'{}' object has no attribute '{}'\".format(\n\u001b[0;32m--> 518\u001b[0;31m type(self).__name__, name))\n\u001b[0m\u001b[1;32m 519\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 520\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__setattr__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'BertSelfAttention' object has no attribute 'attention_probs'" + ] + } + ], + "source": [ + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git "a/likunlin-\344\274\230\345\214\226.ipynb" "b/likunlin-\344\274\230\345\214\226.ipynb" new file mode 100644 index 00000000000000..f5932b2e393d71 --- /dev/null +++ "b/likunlin-\344\274\230\345\214\226.ipynb" @@ -0,0 +1,4690 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from IPython.core.interactiveshell import InteractiveShell\n", + "InteractiveShell.ast_node_interactivity = 'all'" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import nltk\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "03/21/2019 18:04:54 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n", + "03/21/2019 18:04:54 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/\n", + "03/21/2019 18:04:54 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')#do_lower_case:在标记化时将文本转换为小写。默认= True\n", + "#tokenizer.tokenize = nltk.word_tokenize\n", + "model = BertForPreTraining.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "BertForPreTraining:\n", + "Outputs:\n", + " if `masked_lm_labels` and `next_sentence_label` are not `None`:\n", + " Outputs the total_loss which is the sum of the masked language modeling loss and the next\n", + " sentence classification loss.\n", + " if `masked_lm_labels` or `next_sentence_label` is `None`:\n", + " Outputs a tuple comprising\n", + " - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and\n", + " - the next sentence classification logits of shape [batch_size, 2]." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "from_pretrained:\n", + "Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict.\n", + "Download and cache the pre-trained model file if needed." 
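
Editor's note — the cells below all work off this masked-LM output. As a minimal, self-contained sketch (not part of the original notebook; the example sentence, the masked position, and all variable names are illustrative assumptions), the head can be probed for a single masked position like this, assuming the `tokenizer`, `model`, and `device` objects created above:

```python
# Illustrative sketch only: mask one position by id, run BertForPreTraining
# without labels, and read the vocabulary distribution the masked-LM head
# assigns to that position.
text = "Last week I went to the theater."
tokens = ["[CLS]"] + tokenizer.tokenize(text) + ["[SEP]"]
input_ids = tokenizer.convert_tokens_to_ids(tokens)

mask_pos = tokens.index("theater")            # position whose word we want to score
input_ids[mask_pos] = tokenizer.vocab["[MASK]"]

ids = torch.tensor([input_ids], dtype=torch.long).to(device)
type_ids = torch.zeros_like(ids)              # single segment, so all zeros

with torch.no_grad():
    mlm_logits, nsp_logits = model(ids, type_ids)   # [1, seq_len, vocab_size], [1, 2]

probs = F.softmax(mlm_logits[0, mask_pos], dim=-1)
top_probs, top_ids = probs.topk(5)
print(list(zip(tokenizer.convert_ids_to_tokens(top_ids.tolist()), top_probs.tolist())))
```
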
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "import re\n", + "def convert_text_to_examples(text): \n", + " '''功能:\n", + " 把输入的文本变成一个实例,一个实例中包含text_a,text_b(text_b用于是否为上下句的任务,该任务不使用此功能)\n", + " 输入:\n", + " text:一个列表结构,列表中包含原始文本字符串,由于仅完成mlm任务,所以text列表中仅包含一个字符串,就是待检查的字符串\n", + " 输出:\n", + " example:实例,其中包含:\n", + " unique_id:此任务仅用到0\n", + " text_a:text列表内的字符串\n", + " text_b:此任务下该变量为None\n", + " '''\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line) #想要匹配这样的字符串'You are my sunshine. ||| I love you.'\n", + " \n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1) #匹配的第一句,比如You are my sunshine,my only sunshine.\n", + " text_b = m.group(2) #匹配的第二句,比如I love you.\n", + " \n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "#print(convert_text_to_examples(['I love you. The cat is so cute.'])[0].text_a)\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " '''功能:\n", + " 把实例变成一个特征列表\n", + " 输入:\n", + " examples:实例,convert_text_to_examples()函数的输出\n", + " tokenizer:BERT的tokenizer,用于将文本进行各种处理,它可以把一个text转变成tokens,把tokens变成每个token在词典中的编号以及逆运算\n", + " append_special_tokens:是否允许在生成的tokens中加入特殊符号,也就是[CLS]、[MASK]和[SEP],默认为True\n", + " replace_mask:不明\n", + " print_info:不明\n", + " 输出:\n", + " features:每一个feature包含:\n", + " unique_id:编号,目前实现的功能features里面仅有一个feature\n", + " tokens=tokens,tokens:是形如['i','love','you','.']的一个列表\n", + " input_ids=input_ids:字符串中的每个单词在词典中的index序列\n", + " input_mask=input_mask:一堆1\n", + " input_type_ids=input_type_ids)):对text_a,text_b的区分,用于上下句任务,对于本任务,该参数为一个列表,其中包含token长度个的0\n", + " '''\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a) #tokenize的作用是把\"i love you.\"变成['i','love','you','.']\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " input_type_ids = [] #segment embedding\n", + " if append_special_tokens: #输入参数中默认为true\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(1)\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens) #把原来句子中的词语编成在字典中的编号\n", + " input_mask = [1] * len(input_ids) \n", + " \n", + " if ex_index < 5:\n", + "# logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n", + "# 
logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,#编号,目前实现的功能features里面仅有一个feature\n", + " tokens=tokens,#形如['i','love','you','.']的一个列表\n", + " input_ids=input_ids,#字符串中的每个单词在词典中的index序列\n", + " input_mask=input_mask, #一堆1\n", + " input_type_ids=input_type_ids)) #第0类和第1类,对text_a,text_b的区分,本代码中全都是零\n", + " return features \n", + "\n", + "def copy_and_mask_feature(feature, step, masked_tokens=None): \n", + " '''\n", + " 功能:\n", + " 输入feature生成训练的批次数以及mask好的训练素材\n", + " 输入:\n", + " feature:convert_examples_to_features函数的输出\n", + " step:两个[mask]位置的步长\n", + " masked_tokens:默认为None,在程序中没有使用\n", + " '''\n", + " import copy\n", + " tokens = feature.tokens\n", + " len_token = len(tokens)\n", + " if len_token 0\n", + " masked_feature_copies = []\n", + " for i in batches: #用[mask]依次掩盖每一个位置\n", + " feature_copy = copy.deepcopy(feature)\n", + " masked_pos = i\n", + " while masked_pos < len_token:\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_pos = masked_pos + step\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, batches\n", + "\n", + "#masked_feature_copies, batches = copy_and_mask_feature(features[0],3)\n", + "#print(masked_feature_copies[0].input_ids) #结果[101, 1045, 2293, 103, 102]\n", + "#print(batches) #结果是一个range(0,5)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'print(input_ids_sen)\\nprint(in_sentence)\\nprint(input_type_ids_sen)\\nprint(sentences)\\nprint(entire_ids)\\nprint(entire_type_ids)\\n#input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = None'" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "analyzed_cache = {}\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "#print (lemma('gave'))\n", + "#print (lexeme('production'))\n", + "#print (conjugate(verb='give',tense=PRESENT,number=SG))\n", + "def process_text(text): \n", + " '''\n", + " 功能:\n", + " 处理输入文本,将文本按句子分成若干token,得出原来text中index位置的单词在x句子的y位置,还得出各个句子类别码\n", + " 输入:\n", + " text:文本字符串,注意区别\n", + " 输出:\n", + " input_ids_sen:二维列表,第一维列表的元素是每个句子的input_ids列表\n", + " input_type_ids_sen:二维列表,第一维列表的元素是每个句子的input_type_ids列表\n", + " in_sentence:通过这个二维数组可以很方便的通过在完整text中的下标找到这个下标所在的句子和在句子中的下标\n", + " sentences:字符串列表,列表中每一个元素是一个句子字符串\n", + " entire_ids:整个text的input_ids\n", + " entire_type_ids:整个text的input_type_ids\n", + " '''\n", + " token =[]\n", + " entire_type_ids = []\n", + " token0 = tokenizer.tokenize(text)\n", + " token.append('[CLS]')\n", + " entire_type_ids.append(0)\n", + " for i in token0:\n", + " token.append(i)\n", + " entire_type_ids.append(0)\n", + " token.append('[SEP]')\n", + " entire_type_ids.append(0)\n", + " \n", + " entire_ids = tokenizer.convert_tokens_to_ids(token)\n", + " in_sentence = [[0,0]] \n", + " sentence_n = 0\n", + " index = 1\n", + " for i in range(1,len(token)-1):\n", + " in_sentence.append([sentence_n,index]) #每个token中的词在所在句中的位置表示出来,以及该位置在哪一句中\n", + " index = index + 1 #比如,位置i这个词在第sentence句的index位置上\n", + " if token[i] == '.':\n", + " sentence_n = sentence_n + 1\n", + " index = 1\n", + " sentences = text.split(\".\")\n", + " \n", + " sen_token = []\n", + " input_ids_sen 
= []\n", + " input_type_ids_sen = []\n", + " for i,sentence in enumerate(sentences):\n", + " sentence = sentence + '.'\n", + " sentences[i] = sentences[i] + '.'\n", + " token = []\n", + " input_type_ids = []\n", + " tokens = tokenizer.tokenize(sentence)\n", + " token.append('[CLS]')\n", + " input_type_ids.append(0) \n", + " for i in tokens:\n", + " token.append(i)\n", + " input_type_ids.append(0) \n", + " token.append('[SEP]') \n", + " input_type_ids.append(0)\n", + " input_ids_sen.append(tokenizer.convert_tokens_to_ids(token))\n", + " input_type_ids_sen.append(input_type_ids)\n", + " return input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids\n", + "#text = [\"Last week I went to the theatre. I had an very good a seat.The play were very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "#text = [\"Last week I went to the theatre. I had very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "#text = [\"The question is more easy than that one.\"]\n", + "text = [\"Last week I went to the theater. There are many person . Luckily I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 
'This is a private conversation!'\"]\n", + "\n", + "input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = process_text(text[0])\n", + "'''print(input_ids_sen)\n", + "print(in_sentence)\n", + "print(input_type_ids_sen)\n", + "print(sentences)\n", + "print(entire_ids)\n", + "print(entire_type_ids)\n", + "#input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = None'''" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "def get_word(index):\n", + " '''\n", + " 输入:\n", + " index:在完整text中的位置\n", + " 输出\n", + " word:该位置上的单词\n", + " '''\n", + " word_id = entire_ids[index]\n", + " word = tokenizer.ids_to_tokens[word_id]\n", + " return word\n" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "import copy\n", + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "def give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold):\n", + " '''\n", + " 功能:\n", + " 给出指定文本指定位置的推荐用词\n", + " 输入:\n", + " input_ids_:要分析的文本的input_ids\n", + " input_type_ids_:要分析的文本的的input_type_ids\n", + " id_in_sen:要分析的文本中[MASK]的位置下标,也就是需要给出建议用词的位置\n", + " alternative_word:推荐的备选词范围\n", + " threshold:阈值\n", + " 输出:\n", + " suggestion:推荐\n", + " need:推荐的是否是备选词中的词\n", + " suggestion_prob:推荐词填在id_in_sen位置的概率\n", + " top_of_alternative:备选词中最值得推荐的词\n", + " '''\n", + " input_ids = copy.deepcopy(input_ids_)\n", + " input_type_ids = copy.deepcopy(input_type_ids_)\n", + " word0 = input_ids[id_in_sen]\n", + " word0 = tokenizer.ids_to_tokens[word0]\n", + " list_word_id = []\n", + " \n", + " input_ids[id_in_sen] = tokenizer.vocab[\"[MASK]\"]\n", + " T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_ids = T_input_ids.to(device) #拿去GPU\n", + " T_input_type_ids = T_input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(T_input_ids, T_input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + " reduced_mlm_probs = mlm_probs[0][id_in_sen]\n", + "\n", + " top_ind = reduced_mlm_probs.argmax().item()\n", + " top_prob = reduced_mlm_probs.max().item() \n", + " \n", + " list_word = []\n", + " \n", + " top_of_alternative = None\n", + " if len(alternative_word)>0:\n", + " list_word_prob = {}\n", + " for word in alternative_word:\n", + " try:\n", + " list_word_id.append(tokenizer.vocab[word])\n", + " list_word.append(word)\n", + " except KeyError:\n", + " pass\n", + " #print(list_word_id) \n", + " #print(list_word)\n", + " for word,word_id in zip(list_word,list_word_id):\n", + " list_word_prob.update({word:float(reduced_mlm_probs[word_id].data)})\n", + " prob_ord = sorted(list_word_prob.items(),key = lambda x:x[1],reverse = True)\n", + " #print(prob_ord)\n", + " #print(tokenizer.ids_to_tokens[top_ind],top_prob)\n", + " #print(prob_ord[0][0],prob_ord[0][1])\n", + " top_prob_word = prob_ord[0][1]\n", + " top_of_alternative = prob_ord[0][0]\n", + " gap = math.log(top_prob) - math.log(top_prob_word)\n", + " if gap < threshold:\n", + " suggestion = prob_ord[0][0]\n", + " suggestion_prob = prob_ord[0][1]\n", + " need = 1\n", + " else:\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " suggestion_prob = top_prob\n", + " need = 0\n", + " #print(\"gap = \" + str(gap))\n", + " 
#print(prob_ord)\n", + " else:\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " suggestion_prob = top_prob\n", + " need = 0\n", + " \n", + " return suggestion,need,suggestion_prob,top_of_alternative \n", + "\n", + "#返回变量5\n", + "#suggestion -> 最值得推荐的词\n", + "#need -> 是否需要可选词中的一个\n", + "#suggestion_prob ->最值得推荐的词的概率\n", + "#top_of_alternative -> 可选词中最值得推荐的\n", + "#suggestion,need,suggestion_prob,top_of_alternative = give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "from spacy.lemmatizer import Lemmatizer\n", + "from spacy.lang.en import LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES\n", + "from pattern.en import comparative, superlative\n", + "from pattern.en import suggest\n", + "from nltk.stem.lancaster import LancasterStemmer\n", + "from nltk.stem.porter import PorterStemmer\n", + "from nltk.stem import SnowballStemmer\n", + "import enchant\n", + "d = enchant.Dict(\"en_US\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "None\n", + "totally time cost 0.2184145450592041 s\n" + ] + } + ], + "source": [ + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "#分情况讨论,如果新词比旧的词长,或者是短\n", + "def word_convert(word,new_word,Stemmer):\n", + " '''\n", + " 功能:\n", + " 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically\n", + " 输入:\n", + " word:需要变形的词\n", + " new_word:猜想的变形\n", + " 输出:\n", + " suggest_word:推荐的正确变形\n", + " '''\n", + " suggest_word = None\n", + " word_stem = Stemmer().stem(word)\n", + " suggest_ = new_word\n", + " \n", + " suggest_list = suggest(suggest_)\n", + "\n", + " if len(word) 0.95):# or word_[1] > 0.95 :\n", + " suggest_word = word_[0]\n", + " break \n", + " if word_[1] < 0.001:\n", + " break\n", + " stem_list = []\n", + " for stemmer in stemmers:\n", + " suggest_stem = stemmer.stem(word_[0])\n", + " if flag == 1 and suggest_stem[:-1] in word_stem and word_stem[:3] in suggest_stem[:3]: #一般是去后缀\n", + " suggest_word = word_[0]\n", + " break\n", + " elif flag == 0 and word_stem in suggest_stem and word_[0][-1:] in suggest_[-1:]: #一般是加后缀,后缀一定要一样\n", + " suggest_word = word_[0]\n", + " break\n", + " \n", + " if suggest_word != None:\n", + " break\n", + " return suggest_word \n", + "\n", + "import time\n", + "time_start=time.time()\n", + "for i in range(1):\n", + " print(word_convert(\"dark\",\"darkment\",PorterStemmer))\n", + "time_end=time.time()\n", + "print('totally time cost',time_end-time_start,'s')\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "beaus\n", + "totally time cost 0.0006299018859863281 s\n" + ] + } + ], + "source": [ + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "def word_convert(word,new_word,Stemmer):\n", + " '''\n", + " 说明;\n", + " 与上面的区别是使用的拼写改错算法不同,上面那个平均速度慢,但更符合我的要求,这个平均速度更快\n", + " 功能:\n", + " 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically\n", + " 输入:\n", + " word:需要变形的词\n", + " new_word:猜想的变形\n", + " Stemmer:词根提取器\n", + 
" 输出:\n", + " suggest_word:推荐的正确变形\n", + " '''\n", + " if d.check(new_word)==True: #如果发现new_word拼写正确,则直接返回\n", + " return new_word\n", + " else:\n", + " suggest_word = None\n", + " word_stem = Stemmer().stem(word)\n", + " suggest_ = new_word\n", + " suggest_list = d.suggest(suggest_) #可能的正确单词列表\n", + "\n", + " if len(word)death,success->succeed无能为力" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "successfully\n", + "basic\n" + ] + } + ], + "source": [ + "\n", + "def adj_to_adv(word):\n", + " suggest_word = None\n", + " if(word == \"good\"):\n", + " return \"well\"\n", + " else:\n", + " suggest_ = word + 'ly'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " return suggest_word\n", + "#如果形容词副词同形,那么他会返回none,但是不影响计算,因为形容词副词同形啊\n", + "print(adj_to_adv(\"successful\"))\n", + "\n", + "def adv_to_adj(word):\n", + " suggest_word = None\n", + " if(word == \"well\"):\n", + " return \"good\" \n", + " elif word[-2:] == 'ly':\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " return suggest_word\n", + "print(adv_to_adj(\"basically\"))\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['interested', 'interest']\n" + ] + } + ], + "source": [ + "def adj_to_anything(word):#形容词变成其他词性\n", + " suggest_word = None\n", + " suggest_list = []\n", + " if word[-1:] == 'y': #举例 healthy->health\n", + " suggest_ = word[:-1]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ful':#举例 successful->success\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ive': #举例 active -> act\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ed': #举例 interested->interest->interesting\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " suggest_ = suggest_ + 'ing'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " \n", + " elif word[-3:] == 'ing':#举例 interesting->interest->interested\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " suggest_ = suggest_ + 'ed'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " \n", + " elif word[-4:] == 'less': #举例 careless -> care\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ly': #举例: friendly -> friend , lovely -> love\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " \n", + " elif 
word[-1:] == 't': #举例 different -> different\n", + " suggest_ = word[:-1]\n", + " suggest_ = suggest_ + 'ce'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ous': #举例 dangerous -> danger\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'al': #举例 original -> origin\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-4:] == 'able':\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'en': #举例 woolen -> wool\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ic': \n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " elif word[-3:] == 'ish':\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'and'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer) \n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ese':\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'a'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer) \n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ian':\n", + " suggest_ = word[:-1]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'y'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " if suggest_word == None:\n", + " HouZhui_list = ['ment','ness','tion','ture','sion','ty','y','tive','sive']\n", + " for HouZhui in HouZhui_list:\n", + " suggest_ = word + HouZhui\n", + " new_word = word_convert(word,suggest_,PorterStemmer)\n", + " if new_word != None:\n", + " suggest_word = new_word\n", + " suggest_list.append(suggest_word)\n", + " suggest_list = list(set(suggest_list)) \n", + " return suggest_list\n", + "\n", + "print(adj_to_anything('interesting'))\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'import time\\ntime_start=time.time()\\nprint(N_to_anything(\"success\"))\\ntime_end=time.time()\\nprint(\\'time cost\\',time_end-time_start,\\'s\\')'" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def N_to_anything(word):#名词变成其他词性\n", + " suggest_list = []\n", + " list_HouZhui = ['y','ful','tive','sive','ed','ing','less','ly','ous','al','able','en','tic','ish','ance','er','or']\n", + " list_QianZhui = ['a']\n", + " if word[-4:] in ['ment','ness','tion','ture','sion','tive','sive']:\n", + " suggest_ = word[:-4]\n", + " suggest_word = 
word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " else:\n", + " for HouZhui in list_HouZhui:\n", + " suggest_ = word + HouZhui\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " for QianZhui in list_QianZhui:\n", + " suggest_ = QianZhui + word\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " if word[-2:] == 'ce':\n", + " suggest_ = word[:-2]\n", + " suggest_ = syggest_ + 't'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " elif word[-4:] == 'land':\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = suggest_ + 'lish'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " #print(suggest_list)\n", + " suggest_list = list(set(suggest_list))\n", + " return suggest_list\n", + "'''import time\n", + "time_start=time.time()\n", + "print(N_to_anything(\"success\"))\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s')'''" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['succeeder', 'succeeds', 'succeeded']\n", + "time cost 0.654491662979126 s\n" + ] + } + ], + "source": [ + "def V_to_anything(word):#动词变成其他词性\n", + " suggest_word = None\n", + " suggest_list = []\n", + "\n", + " HouZhui_list = ['ful','tive','sive','ed','less','ly','ous','al','able','en','tic','ish','ance','tion','sion','ment','er','or','ee']\n", + " for HouZhui in HouZhui_list:\n", + " suggest_ = word + HouZhui\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + "\n", + " suggest_list = list(set(suggest_list))\n", + " return suggest_list\n", + "\n", + "time_start=time.time()\n", + "print(V_to_anything('succeed'))\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s') " + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 功能:\\n 生成形容词,副词关联词表\\n 输入:\\n word:形容词/副词\\n 输出:\\n list_word:为没有添加词的其他形式,包括三音节以下词的比较级最高级\\n list_word2:为三音节及以上的词的比较级最高级,如果输入形容词比较级最高级没有more/most,该列表为空\\n 说明:\\n 由于三音节形容词/副词的比较级,最高级为more/most+原形容词/副词,所以特别把形容词/副词和其他词性变形区分出来\\n'" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(['difficult', 'difficulty', 'difficultly'], ['more difficult', 'most difficult'])\n", + "(['early', 'ear', 'earliest', 'earlier'], [])\n" + ] + } + ], + "source": [ + "'''\n", + " 功能:\n", + " 生成形容词,副词关联词表\n", + " 输入:\n", + " word:形容词/副词\n", + " 输出:\n", + " list_word:为没有添加词的其他形式,包括三音节以下词的比较级最高级\n", + " list_word2:为三音节及以上的词的比较级最高级,如果输入形容词比较级最高级没有more/most,该列表为空\n", + " 说明:\n", + " 由于三音节形容词/副词的比较级,最高级为more/most+原形容词/副词,所以特别把形容词/副词和其他词性变形区分出来\n", + "'''\n", + "\n", + "def build_like_word_adj(word): #创建类似形容词列表\n", + " list_word = []\n", + " list_word2 = [] #把比较级最高级带more的放在这里\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in 
lemmas:\n", + " list_word.append(i)\n", + " word_er = comparative(i)\n", + " if \"more\" in word_er: #把比较级带more,most的词放在另一个列表list_word2\n", + " list_word2.append(word_er)\n", + " else:\n", + " list_word.append(word_er)\n", + " word_est = superlative(i)\n", + " if \"most\" in word_est:\n", + " list_word2.append(word_est)\n", + " else:\n", + " list_word.append(word_est)\n", + " word_adv = adj_to_adv(i)\n", + " if word_adv != None:\n", + " list_word.append(word_adv)\n", + " list_N = adj_to_anything(word)\n", + " for N in list_N:\n", + " list_word.append(N)\n", + " \n", + " list_word = list(set(list_word))\n", + " return list_word,list_word2\n", + "\n", + "def build_like_word_adv(word): #创建类似形容词列表\n", + " list_word = []\n", + " list_word2 = []\n", + " list_special = ['however','seldom','often','never','otherwise']\n", + " if word in list_special:\n", + " list_word = [word]\n", + " list_word2 = []\n", + " else:\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " word_er = comparative(i)\n", + " if \"more\" in word_er:\n", + " list_word2.append(word_er)\n", + " else:\n", + " list_word.append(word_er)\n", + " word_est = superlative(i)\n", + " if \"most\" in word_est:\n", + " list_word2.append(word_est)\n", + " else:\n", + " list_word.append(word_est)\n", + " word_adv = adv_to_adj(i)\n", + " if word_adv != None:\n", + " list_word.append(word_adv)\n", + " list_word = list(set(list_word))\n", + " return list_word,list_word2\n", + "\n", + "\n", + "\n", + "print(build_like_word_adj(\"difficult\"))\n", + "print(build_like_word_adv(\"early\"))\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 功能:\\n 根据检查的位置整理出放入BERT模型的input_ids,input_type_ids以及检查位置在input_ids中的下标位置\\n pre_training_input_in_sentence得到检查位置所在句子的信息\\n pre_training_input_entire得到检查位置所在句子的信息\\n 输入:\\n index:在完整text中的位置\\n 输出:\\n input_ids:\\n input_type_ids:\\n id_in_sen:检查位置在句子中的下标\\n index:检查位置在完整text中的下标,其实就是输入的下标\\n'" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "the\n", + "[101, 2197, 2733, 1045, 2253, 2000, 1996, 4258, 1012, 102]\n", + "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n", + "6\n" + ] + } + ], + "source": [ + "'''\n", + " 功能:\n", + " 根据检查的位置整理出放入BERT模型的input_ids,input_type_ids以及检查位置在input_ids中的下标位置\n", + " pre_training_input_in_sentence得到检查位置所在句子的信息\n", + " pre_training_input_entire得到检查位置所在句子的信息\n", + " 输入:\n", + " index:在完整text中的位置\n", + " 输出:\n", + " input_ids:\n", + " input_type_ids:\n", + " id_in_sen:检查位置在句子中的下标\n", + " index:检查位置在完整text中的下标,其实就是输入的下标\n", + "'''\n", + "def pre_training_input_in_sentence(index): \n", + " sentence_id = in_sentence[index][0]\n", + " id_in_sen = in_sentence[index][1]\n", + " word = input_ids_sen[sentence_id][id_in_sen]\n", + " word = tokenizer.ids_to_tokens[word]\n", + " input_ids = copy.deepcopy(input_ids_sen[sentence_id])\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "\n", + " return word,input_ids,input_type_ids,id_in_sen\n", + "\n", + "def pre_training_input_entire(index): \n", + " word = entire_ids[index]\n", + " word = tokenizer.ids_to_tokens[word]\n", + " input_ids = copy.deepcopy(entire_ids)\n", + " input_type_ids = copy.deepcopy(entire_type_ids)\n", + "\n", + " return word,input_ids,input_type_ids,index\n", + "\n", + "word,input_ids,input_type_ids,index = 
pre_training_input_in_sentence(6)\n", + "print(word)\n", + "print(input_ids)\n", + "print(input_type_ids)\n", + "print(index)\n", + "#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 102]\n", + "#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 1045, 2018, 1037, 2200, 2204, 2835, 1012, 1996, 2377, 2001, 2200, 5875, 1012, 102]" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "分析各种词性系列函数\n", + " 功能:对第一遍检查得出的有问题的位置的单词,根据不同的词性进行不同步骤的分析\n", + " 输入:\n", + " index:在原文中的错误位置\n", + " 输出:\n", + " 给出的修改建议,修改建议不局限于错误位置" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "'week'" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import copy\n", + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "def analyse_V(index):\n", + "#这是一个处理动词语法问题的函数,输入为问题词在text的token中的下标index\n", + " need_to_will = need_be = 0\n", + " list_be = lexeme('be')\n", + " list_be = lexeme('be')[:8]\n", + " #**************************************判断是不是动词其他形式************************\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " if wordV in list_be:\n", + " list_word = list_be\n", + " else:\n", + " list_word = lexeme(wordV)\n", + " list_others = V_to_anything(conjugate(verb=wordV,tense=PRESENT,person = 1))\n", + " for other in list_others:\n", + " list_word.append(other)\n", + " #print(\"list_word = \",list_word)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion0,need,_,_= give_suggestion(input_ids,input_type_ids,index,list_word,5)\n", + " if need == 1 and suggestion0 != wordV:\n", + " return suggestion0\n", + " \n", + " else:#**************************************判断是不是缺介词***************************\n", + " wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen + 1,tokenizer.vocab['at'])#就随便插入一个东西,占位子\n", + " input_type_ids.append(0)\n", + " list_IN = [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\"]\n", + " suggestion4,need_IN,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_IN,1)\n", + " if need_IN == 1:\n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[suggestion4]\n", + " list_word = lexeme(wordV)\n", + " suggestion44,need,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,3)\n", + " if need == 1:\n", + " suggestion = suggestion44 + ' ' +suggestion4\n", + " return suggestion\n", + " #**************************************判断是不是不定式或者将来时*************************** \n", + " #print(\"是否用不定式或将来时\")\n", + " wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen,tokenizer.vocab['to'])#就随便插入一个东西,占位子\n", + " input_type_ids.append(0)\n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,person = 1)]\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion_to_will,need_to_will,prob0,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,[\"to\",\"will\"],1)\n", + " if need_to_will == 1:\n", + " list_word = [conjugate(verb=wordV,tense=PRESENT,person = 
1),conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " suggestion,need0,_,prob00= give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_word,5) \n", + " \n", + " #**********************************判断是不是被动语态或者进行时******************* \n", + "\n", + " #********************是不是被动语态**************** \n", + " #print(\"是不是被动语态\")\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab['be'])#就随便插入一个东西,占位子\n", + " try:\n", + " input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PAST,aspect=PROGRESSIVE)]\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion1,need_be1,prob1,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1)\n", + " except KeyError:\n", + " need_be1 = 0\n", + " #********************是不是现在分词**************** \n", + " #print(\"是不是进行时\")\n", + " try:\n", + " input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " suggestion2,need_be2,prob2,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " except KeyError:\n", + " need_be2 = 0\n", + " \n", + " #if need_be1 == 1 or need_be2 == 1:\n", + " #print(\"需要be\")\n", + " #***************************选择是不定式还是被动语态还是进行时****************************\n", + " prob_max = 0\n", + " if need_to_will == 1:\n", + " prob_max = max(prob_max,prob0)\n", + " if need_be1 == 1:\n", + " prob_max = max(prob_max,prob1)\n", + " if need_be2 == 1:\n", + " prob_max = max(prob_max,prob2)\n", + "\n", + " if need_to_will == 1 and prob_max == prob0:\n", + " need_be = 0\n", + " if need_be1 == 1 and prob_max == prob1:\n", + " need_to_will = 0\n", + " need_be = 1\n", + " be_ = suggestion1\n", + " if need_be2 == 1 and prob_max == prob2:\n", + " need_to_will = 0\n", + " need_be = 1\n", + " be_ = suggestion2\n", + " #*************************************************处理各种语法******************************************************************\n", + " if need_to_will == 1:\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion_to_will])\n", + " input_type_ids.append(0)\n", + " list_word = [conjugate(verb=wordV,tense=PRESENT,person = 1),conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " suggestion,_,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5)\n", + " return 'to ' + suggestion\n", + "\n", + " elif need_be == 1:\n", + " #********************************被动语态或者进行时*****************\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[be_])\n", + " input_type_ids.append(0)\n", + " list_word = lexeme(wordV)\n", + " suggestion,_,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5)\n", + " suggestion = be_ + ' '+ suggestion\n", + " else:\n", + " #*****************************************判断该位置是不是动词的其他时态**************************************************************\n", + " suggestion = suggestion0\n", + "\n", + " return suggestion\n", + " \n", + "analyse_V(2)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "was unwilling\n" + ] + } + ], + "source": [ + "def analyse_adj(index):\n", + " #这是一个处理形容词语法问题的函数,输入为问题词在text的token中的下标index\n", + " 
wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_entire(index) \n", + " list_word,list_word2 = build_like_word_adj(wordADJ)\n", + " #print(list_word)\n", + " suggestion0,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,5)\n", + " \n", + " if need_adj == 1 and suggestion0 != wordADJ:#判断是不是形容词其他变形\n", + " return suggestion0\n", + " elif get_word(index - 1) in ['more','most'] and len(list_word2) == 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级不需要加more/most,但是前面有more/most\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " del input_ids[id_in_sen - 1]\n", + " del input_type_ids[0]\n", + " suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,6)\n", + " return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3\n", + " elif get_word(index + 1) in ['##er','##est','##r','##st'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[wordADJ]\n", + " suggestion4,need_bijiao,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,['more','most'],2)\n", + " if need_bijiao == 1:\n", + " input_ids[id_in_sen] = tokenizer.vocab[suggestion4]\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,6)\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ suggestion4 + ' ' + suggestion5 \n", + " else:#检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级抑或是be动词\n", + " #print(\"缺冠词或者没用比较级\")\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " list_DT = ['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] \n", + " suggestion,need_DT,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_DT,1)\n", + " if need_DT == 1:\n", + " wordADJ,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion2,_,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,6) \n", + " return suggestion + ' ' + suggestion2\n", + " else:\n", + " return suggestion0\n", + "print(analyse_adj(78))" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + ",\n" + ] + } + ], + "source": [ + "def analyse_adv(index):\n", + " #这是一个处理形容词语法问题的函数,输入为问题词在text的token中的下标index\n", + " need_DT = 0\n", + " need_douhao = 0\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_entire(index)\n", + " list_word,list_word2 = build_like_word_adv(wordADV)\n", + " suggestion0,need_adv,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,3.5)\n", + " if need_adv == 1 and suggestion0 != wordADV:\n", + " return suggestion0\n", + " elif get_word(index - 1) in ['more','most'] and len(list_word2) == 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级不需要加more/most,但是前面有more/most\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " del input_ids[id_in_sen - 1]\n", + " del input_type_ids[0]\n", + " 
suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,5)\n", + " return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3\n", + " elif get_word(index + 1) in ['##er','##est','##r','##st'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[wordADV]\n", + " suggestion4,need_bijiao,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,['more','most'],2)\n", + " if need_bijiao == 1:\n", + " input_ids[id_in_sen] = tokenizer.vocab[suggestion4]\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,5)\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ suggestion4 + ' ' + suggestion5 \n", + " else:#检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级,be动词\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " list_DT = ['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] \n", + " suggestion,need_DT,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_DT,1)\n", + " if need_DT == 1:\n", + " #print(\"需要冠词\")\n", + " wordADV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion2,_,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,3) \n", + " return suggestion + ' ' + suggestion2\n", + " else:\n", + " #副词后面可能缺少逗号,比如 Luckily,I won the game.\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen + 1,tokenizer.vocab[\",\"])\n", + " input_type_ids.append(0)\n", + " suggestion3,need_douhao,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,2)\n", + " if need_douhao == 1:\n", + " return suggestion3 + ' ,'\n", + " else:\n", + " return suggestion0\n", + "print(analyse_adv(5))" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "wanted\n" + ] + } + ], + "source": [ + "from pattern.en import article,referenced,pluralize, singularize\n", + "import nltk\n", + "def analyse_N(index):\n", + " #这是一个处理名词语法问题的函数,输入为问题词在text的token中的下标index\n", + "#******************************************初始数据处理**************************************************************************\n", + " need_DT = 0 #表示是否需要在前面加冠词 \n", + " wordN,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " word_tag = nltk.pos_tag([wordN])\n", + " if word_tag[0][1] == \"NN\":\n", + " N_ = wordN\n", + " N_s= pluralize(wordN)\n", + " else:\n", + " N_ = singularize(wordN)\n", + " N_s= wordN\n", + " list_word = [N_,N_s]\n", + " list_others = N_to_anything(N_)\n", + " for other in list_others:\n", + " list_word.append(other)\n", + " #print(list_word)\n", + "#*****************************************判断是否需要冠词或者代词************************************************************************ \n", + " \n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " 
#print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " list_DT = ['the','a','an','this','that','these','those','some','any','all']\n", + " suggestion,need_DT,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_DT,1)\n", + " if need_DT == 0:#不需要冠词\n", + " #print(\"不需要冠词\")\n", + " wordN,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " suggestion,need_DT,_,top_of_list_word = give_suggestion(input_ids,input_type_ids,index,list_word,7)\n", + " return suggestion\n", + " elif need_DT == 1:\n", + " wordN,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " suggestion2,_,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,7)\n", + " return suggestion + ' ' + suggestion2\n", + "\n", + "print(analyse_N(78))\n" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 这是一个相关代词的词典,容易混淆的词放在一个列表中\\n\\n'" + ] + }, + "execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'he': ['he', 'his', 'him', 'himself', 'who', 'whom', 'whose'], 'his': ['he', 'his', 'him', 'himself', 'who', 'whom', 'whose'], 'him': ['he', 'his', 'him', 'himself', 'who', 'whom', 'whose'], 'himself': ['he', 'his', 'him', 'himself', 'who', 'whom', 'whose'], 'who': ['that', 'which', 'who', 'whom', 'whose', 'as'], 'whom': ['that', 'which', 'who', 'whom', 'whose', 'as'], 'whose': ['that', 'which', 'who', 'whom', 'whose', 'as'], 'she': ['she', 'her', 'herself', 'hers', 'who', 'whom', 'whose'], 'her': ['she', 'her', 'herself', 'hers', 'who', 'whom', 'whose'], 'herself': ['she', 'her', 'herself', 'hers', 'who', 'whom', 'whose'], 'hers': ['she', 'her', 'herself', 'hers', 'who', 'whom', 'whose'], 'it': ['it', 'its', 'itself', 'who', 'whom', 'whose'], 'its': ['it', 'its', 'itself', 'who', 'whom', 'whose'], 'itself': ['it', 'its', 'itself', 'who', 'whom', 'whose'], 'i': ['i', 'me', 'my', 'myself', 'mine'], 'me': ['i', 'me', 'my', 'myself', 'mine'], 'my': ['i', 'me', 'my', 'myself', 'mine'], 'myself': ['i', 'me', 'my', 'myself', 'mine'], 'mine': ['i', 'me', 'my', 'myself', 'mine'], 'you': ['you', 'your', 'yourself', 'yourselves'], 'your': ['you', 'your', 'yourself', 'yourselves'], 'yourself': ['you', 'your', 'yourself', 'yourselves'], 'yourselves': ['you', 'your', 'yourself', 'yourselves'], 'we': ['we', 'us', 'our', 'ours', 'ourselves'], 'us': ['we', 'us', 'our', 'ours', 'ourselves'], 'our': ['we', 'us', 'our', 'ours', 'ourselves'], 'ours': ['we', 'us', 'our', 'ours', 'ourselves'], 'ourselves': ['we', 'us', 'our', 'ours', 'ourselves'], 'they': ['they', 'them', 'their', 'theirs'], 'them': ['they', 'them', 'their', 'theirs'], 'their': ['they', 'them', 'their', 'theirs'], 'theirs': ['they', 'them', 'their', 'theirs'], 'this': ['this', 'these'], 'these': ['this', 'these'], 'that': ['that', 'which', 'who', 'whom', 'whose', 'as'], 'those': ['that', 'those'], 'which': ['that', 'which', 'who', 'whom', 'whose', 'as'], 'what': ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'], 'whoever': ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'], 'whichever': ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'], 'whatever': ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'], 'as': ['that', 'which', 'who', 'whom', 'whose', 'as'], 
'some': ['some', 'any'], 'any': ['some', 'any'], 'few': ['few', 'little'], 'little': ['few', 'little'], 'many': ['many', 'much'], 'much': ['many', 'much'], 'another': ['another', 'other'], 'other': ['another', 'other']}\n" + ] + } + ], + "source": [ + "'''\n", + " 这是一个相关代词的词典,容易混淆的词放在一个列表中\n", + "\n", + "'''\n", + "like_he = ['he','his','him','himself','who', 'whom', 'whose']\n", + "like_she = ['she','her','herself','hers','who', 'whom', 'whose']\n", + "like_it = ['it','its','itself','who', 'whom', 'whose']\n", + "like_i = ['i','me','my','myself','mine']\n", + "like_you = ['you','your','yourself','yourselves']\n", + "like_we = ['we','us','our','ours','ourselves']\n", + "like_they = ['they','them','their','theirs']\n", + "\n", + "like_this = ['this', 'these'] \n", + "like_that = ['that','those'] \n", + "pronoun_Question = ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'] #疑问代词\n", + "pronoun_relation = ['that', 'which', 'who', 'whom', 'whose', 'as'] #关系代词\n", + "like_some = ['some','any']\n", + "like_few = ['few','little']\n", + "like_many = ['many','much']\n", + "like_other = ['another','other']\n", + "\n", + "pronoun = [like_he,like_she,like_it,like_i,like_you,like_we,like_they,like_this,like_that,pronoun_Question,pronoun_relation,like_some,like_few,like_many,like_other]\n", + "pronoun_dictionary = {}\n", + "\n", + "for list_word in pronoun:\n", + " for word in list_word:\n", + " pronoun_dictionary.update({word:list_word})\n", + "print(pronoun_dictionary)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": {}, + "outputs": [ + { + "ename": "KeyError", + "evalue": "'night'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0msuggestion\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m=\u001b[0m \u001b[0mgive_suggestion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_type_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlist_word\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0msuggestion\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0manalyse_pronoun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m14\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in \u001b[0;36manalyse_pronoun\u001b[0;34m(index)\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m#这是一个处理代词语法问题的函数,输入为问题词在text的token中的下标index\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mwordPROP\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_type_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mindex\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpre_training_input_entire\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mlist_word\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mpronoun_dictionary\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mwordPROP\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0msuggestion\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m=\u001b[0m \u001b[0mgive_suggestion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_type_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlist_word\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0msuggestion\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyError\u001b[0m: 'night'" + ] + } + ], + "source": [ + "def analyse_pronoun(index):\n", + " #这是一个处理代词语法问题的函数,输入为问题词在text的token中的下标index\n", + " wordPROP,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " list_word = pronoun_dictionary[wordPROP]\n", + " suggestion,_,_,_= give_suggestion(input_ids,input_type_ids,index,list_word,3)\n", + " return suggestion\n", + "print(analyse_pronoun(14))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "解释一下,有很多副词确实也不是ly形式结尾的,比如用在三音节形容词前面的比较级more,most,还有频度副词often,seldom,never这种。因为这些词比较不容易用错,先暂时不考虑" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "all\n" + ] + } + ], + "source": [ + "def analyse_DT(index):\n", + " #检查冠词,检查是不是用别的冠词,或者是去掉会不会更好\n", + " wordDT,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " if wordDT in ['all',\"every\",'per']:\n", + " return wordDT\n", + " else:\n", + " if wordDT in ['some','any']:\n", + " list_word = ['some','any']\n", + " elif wordDT in ['this','that','these','those']:\n", + " list_word = ['this','that','these','those']\n", + " elif wordDT in ['the','a','an']:\n", + " list_word = ['the','a','an']\n", + " elif wordDT in ['another','other']:\n", + " list_word = ['another','other']\n", + " else:\n", + " list_word = []\n", + " suggestion0,need_DT,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,1)\n", + " if wordDT in ['some','any','this','that','these','those','another','other','the','a','an']:\n", + " if need_DT == 1:\n", + " return suggestion0\n", + " else:\n", + " return \"去掉 \" + get_word(index)\n", + " else:\n", + " return wordDT\n", + " \n", + "print(analyse_DT(25))" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "but\n" + ] + } + ], + "source": [ + "def analyse_IN(index):\n", + " #检查介词是否需要去掉\n", + " wordIN,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " list_word = [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\",'to']\n", + " suggestion0,need_IN,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,3)\n", + " if need_IN == 1:\n", + " return suggestion0\n", + " else:\n", + " if wordIN in list_word:\n", + " return \"去掉 \" + get_word(index)\n", + " else:\n", + " return suggestion0\n", + " \n", + "print(analyse_IN(76))" + ] + }, + { 
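+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# A minimal, self-contained sketch of the pattern that the analyse_* helpers above and\n",
+    "# the judge_*_and_suggestion helpers defined below both rely on: mask one position, run\n",
+    "# the BERT masked LM, and accept a candidate word only when it beats the original word\n",
+    "# by a log-probability gap larger than a threshold. It is a sketch, not the notebook's\n",
+    "# own give_suggestion / judge_and_suggestion: it assumes the pytorch_pretrained_bert\n",
+    "# package from this repository and the public 'bert-base-uncased' weights, and\n",
+    "# score_candidates and its arguments are illustrative names only.\n",
+    "import math\n",
+    "import torch\n",
+    "import torch.nn.functional as F\n",
+    "from pytorch_pretrained_bert import BertTokenizer, BertForMaskedLM\n",
+    "\n",
+    "_tok = BertTokenizer.from_pretrained('bert-base-uncased')\n",
+    "_mlm = BertForMaskedLM.from_pretrained('bert-base-uncased')\n",
+    "_mlm.eval()\n",
+    "\n",
+    "def score_candidates(words, position, candidates, threshold):\n",
+    "    # assumes every word is a single WordPiece token already present in the vocab\n",
+    "    original = words[position]\n",
+    "    masked = ['[CLS]'] + words[:position] + ['[MASK]'] + words[position + 1:] + ['[SEP]']\n",
+    "    input_ids = torch.tensor([_tok.convert_tokens_to_ids(masked)])\n",
+    "    segment_ids = torch.zeros_like(input_ids)\n",
+    "    with torch.no_grad():\n",
+    "        logits = _mlm(input_ids, segment_ids)  # [1, seq_len, vocab]\n",
+    "    probs = F.softmax(logits[0, position + 1], dim=-1)  # +1 skips [CLS]\n",
+    "    original_prob = probs[_tok.vocab[original]].item()\n",
+    "    best_word, best_prob = max(\n",
+    "        ((w, probs[_tok.vocab[w]].item()) for w in candidates if w in _tok.vocab),\n",
+    "        key=lambda pair: pair[1])\n",
+    "    gap = math.log(best_prob) - math.log(original_prob)\n",
+    "    # only suggest a replacement when the best candidate clearly beats the original\n",
+    "    return (best_word, gap) if gap > threshold else (original, gap)\n",
+    "\n",
+    "# e.g. score_candidates('we would play card games of all sort .'.split(), 7,\n",
+    "#                       ['sort', 'sorts', 'kind', 'kinds'], 1.0)"
+   ]
+  },
+  {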
+ "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 这是一个输出BERT模型训练结果的函数,方便查看调试\\n'" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "'''\n", + " 这是一个输出BERT模型训练结果的函数,方便查看调试\n", + "'''\n", + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): #输出结果的函数,要最高概率topk个输出\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item() #这个probs是该字符串第i个位置上填上词典上各个词的概率,prob_是词典上原来天的这个词的概率\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " #print(values, indices)\n", + " #print(\"****************************************************************************************************\")\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret #返回的这是个啥" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 功能:\\n judge_and_suggestion系列函数,这个系列函数是在analyse之前做的一个预先判断处理,判断的是该位置原来词的相关词中有没有可以代替它的词\\n 当相关词中有词的可能性和原词的可能性的差距大于阈值,则认为原词是错的\\n 输入:\\n prob:该位置可能性列表\\n original:该位置原先的词\\n list_word:该位置相关词表\\n threhold:门槛,也就是阈值\\n 输出:\\n judge:判断原来的词是否正确,0表示需要换词,1表示不需要换词或者说相关词里面没一个合适的\\n suggestion:相关词中最好的推荐\\n'" + ] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import math\n", + "from pattern import en\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "'''\n", + " 功能:\n", + " judge_and_suggestion系列函数,这个系列函数是在analyse之前做的一个预先判断处理,判断的是该位置原来词的相关词中有没有可以代替它的词\n", + " 当相关词中有词的可能性和原词的可能性的差距大于阈值,则认为原词是错的\n", + " 输入:\n", + " prob:该位置可能性列表\n", + " original:该位置原先的词\n", + " list_word:该位置相关词表\n", + " threhold:门槛,也就是阈值\n", + " 输出:\n", + " judge:判断原来的词是否正确,0表示需要换词,1表示不需要换词或者说相关词里面没一个合适的\n", + " suggestion:相关词中最好的推荐\n", + "'''\n", + "def judge_and_suggestion(prob,original,list_word,threhold):\n", + " top_prob = 0\n", + " original_prob = prob[tokenizer.vocab[original]]\n", + " best = None\n", + " suggestion = None\n", + " for word in list_word:\n", + " try:\n", + " word_id = tokenizer.vocab[word]\n", + " prob_word = prob[word_id]\n", + " if prob_word > top_prob:\n", + " top_prob = prob_word\n", + " best_word = word\n", + " except KeyError:\n", + " pass\n", + " #print(best_word,top_prob)\n", + " #print(original,original_prob)\n", + " gap = math.log(top_prob) - math.log(original_prob)\n", + " #print(gap)\n", + " if gap > threhold:\n", + " suggestion = best_word\n", + " return 0,suggestion\n", + " else:\n", + " return 1,suggestion\n", + "def judge_CC_and_suggestion(prob,original_CC):\n", + " list_CC = 
[\"but\",\"yet\",\"still\",\"however\",\"although\",\"for\",\"so\",\"thus\",\"and\",\"or\",\"too\",\"again\",\"another\",\"either\",\"or\",\"neither\",\"nor\",\"when\",\"while\",\"as\",\"whenever\",\"since\",\"until\",\"till\"]\n", + " judge,suggestion = judge_and_suggestion(prob,original_CC,list_CC,2)\n", + " return judge,suggestion\n", + "def judge_V_and_suggestion(prob,original_V):\n", + " list_V = lexeme(original_V)\n", + " judge,suggestion = judge_and_suggestion(prob,original_V,list_V,2)\n", + " #print(\"检查点\",judge,suggestion)\n", + " return judge,suggestion\n", + " \n", + "def judge_IN_and_suggestion(prob,original_IN):\n", + " list_IN = [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\",'to']\n", + " judge,suggestion = judge_and_suggestion(prob,original_IN,list_IN,1)\n", + " return judge,suggestion\n", + "def judge_DT_and_suggestion(prob,original_DT):\n", + " if original_DT in ['some','any']:\n", + " list_word = ['some','any']\n", + " elif original_DT in ['this','that','these','those']:\n", + " list_word = ['this','that','these','those']\n", + " elif original_DT in ['the','a','an']:\n", + " list_word = ['the','a','an']\n", + " elif original_DT in ['another','other']:\n", + " list_word = ['another','other']\n", + " judge,suggestion = judge_and_suggestion(prob,original_DT,list_DT,1)\n", + " return judge,suggestion\n", + "\n", + "def judge_MD_and_suggestion(prob,original_MD):\n", + " if original_MD in ['can','could']:\n", + " list_MD = ['can','could']\n", + " elif original_MD in ['may','might']:\n", + " list_MD = ['may','might']\n", + " elif original_MD in ['shall','should']:\n", + " list_MD = ['shall','should'] \n", + " elif original_MD in ['will','would']:\n", + " list_MD = ['will','would'] \n", + " elif original_MD in ['dare','dared']:\n", + " list_MD = ['dare','dared'] \n", + " else:\n", + " list_MD = []\n", + " judge,suggestion = judge_and_suggestion(prob,original_MD,list_MD,1)\n", + " if original_MD not in ['can','could','may','might','shall','should','will','would'] :\n", + " return judge,suggestion\n", + " else:\n", + " return 1,None\n", + " \n", + "def judge_N_and_suggestion(prob,original_N):\n", + " word_tag = nltk.pos_tag([original_N])\n", + " if word_tag[0][1] == \"NN\":\n", + " N_ = original_N\n", + " N_s= pluralize(original_N)\n", + " else:\n", + " N_ = singularize(original_N)\n", + " N_s= original_N\n", + " list_N = [N_,N_s]\n", + " list_others = N_to_anything(N_)\n", + " for other in list_others:\n", + " list_N.append(other)\n", + " judge,suggestion = judge_and_suggestion(prob,original_N,list_N,0.5)\n", + " return judge,suggestion\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "import spacy\n", + "nlp = spacy.load('en')\n", + "\n", + "suggestions = {} #\n", + "def show_abnormals(tokens, probs, show_suggestions=False):\n", + " global suggestions\n", + " suggestions = {} \n", + " def gap2color(gap):\n", + " if gap <= 5:\n", + " return 'yellow_1'\n", + " elif gap <= 10:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap):\n", + " if gap == 0:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " 
print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " if show_suggestions and gap > 5:\n", + " print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='')\n", + " # print('/' + suggestion, end=' ')\n", + " # print('%.2f' % gap, end=' ')\n", + " \n", + " avg_gap = 0.\n", + " tokens_tag = nltk.pos_tag(tokens)\n", + " #print(tokens_tag)\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " ind_ = tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item()\n", + " top_prob = probs[i].max().item()\n", + " top_ind = probs[i].argmax().item()\n", + " top_word = tokenizer.ids_to_tokens[top_ind]\n", + " gap = math.log(top_prob) - math.log(prob_) #计算两个词之间的差距\n", + " print()\n", + " print(\"*******************************************************************************************************************\")\n", + " print(i)\n", + " print(gap)\n", + " avg_gap += gap\n", + " #suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " suggestion = None\n", + " #tag = tokens_tag[i][1]\n", + " #doc = nlp(tokens[i])\n", + " #tag = doc[0].tag_\n", + " tag = tokens_tag[i][1]\n", + " #print(tokens_tag[i])\n", + " print(tag)\n", + " if 'VB' in tag:\n", + " if gap>3 and top_word in [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\",'to']:\n", + " suggestion = analyse_V(i) #如果推荐的是介词,说明这个位置可能需要补充什么 \n", + " elif gap > 7.5:\n", + " suggestion = analyse_V(i)\n", + " elif gap < 7.5 and gap > 3:\n", + " judge,suggestion = judge_V_and_suggestion(probs[i],tokens[i])\n", + " if judge == 0 :\n", + " gap = 6\n", + " else:\n", + " gap = 3\n", + " elif \"DT\" == tag and gap > 3:\n", + " suggestion = analyse_DT(i)\n", + " elif \"JJ\" in tag :\n", + " if gap > 6:\n", + " suggestion = analyse_adj(i)\n", + " else:\n", + " gap = 3\n", + " elif \"RB\" in tag and gap > 5:\n", + " suggestion = analyse_adv(i)\n", + " \n", + " elif \"PRP\" in tag and gap >5:\n", + " suggestion = analyse_pronoun(i)\n", + " elif \"NN\" in tag:\n", + " if gap > 4 and tokens[i][:2]==\"##\" and suggestions.__contains__(i-1)==False:\n", + " #如果gap>4并且该位置是后缀,并且前一个位置被建议修改,说明该位置需要去掉\n", + " suggestion = '去掉' + ' ' + tokens[i]\n", + " elif gap > 7.5:\n", + " suggestion = analyse_N(i)\n", + " elif gap < 7.5 and gap > 2:\n", + " judge,suggestion = judge_N_and_suggestion(probs[i],tokens[i])\n", + " if judge == 0 :\n", + " gap = 6\n", + " else:\n", + " gap = 3\n", + " elif \"CC\" in tag and gap > 2 :\n", + " judge,suggestion = judge_CC_and_suggestion(probs[i],tokens[i])\n", + " if judge == 1 :\n", + " gap = 3\n", + "\n", + " elif (\"IN\" == tag or 'TO' == tag) and gap > 2:\n", + " suggestion = analyse_IN(i)\n", + " \n", + " elif 'MD' in tag and gap > 5:\n", + " print(\"检查点1*****************************************************\")\n", + " judge,suggestion = judge_MD_and_suggestion(probs[i],tokens[i])\n", + " if judge == 1:\n", + " gap = 3\n", + " \n", + " elif \"CD\" in tag:\n", + " gap = 0 \n", + " \n", + " elif \"WDT\" == tag and gap > 2: #who,which,that那些\n", + " suggestion = top_word\n", + " \n", + " elif gap > 5:\n", + " suggestion = top_word\n", + " \n", + " if suggestion != tokens[i] and suggestion != None:\n", + " 
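+    "            # remember the accepted suggestion under its token index and raise gap to\n",
+    "            # at least 6 so print_token colours this token and prints the replacement\n",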
suggestions.update({i:suggestion})\n", + " gap = max(gap,6)\n", + " else:\n", + " gap = min(gap,3)\n", + " print_token(tokens[i], suggestion, gap)\n", + " \n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print('平均gap:'+ str(avg_gap))\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "([[101,\n", + " 2043,\n", + " 1045,\n", + " 2001,\n", + " 2210,\n", + " 1010,\n", + " 5958,\n", + " 1005,\n", + " 1055,\n", + " 2305,\n", + " 2001,\n", + " 2256,\n", + " 2155,\n", + " 2208,\n", + " 2305,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 2044,\n", + " 15264,\n", + " 1010,\n", + " 2057,\n", + " 2052,\n", + " 2377,\n", + " 4003,\n", + " 2399,\n", + " 1997,\n", + " 2035,\n", + " 4066,\n", + " 1999,\n", + " 1996,\n", + " 3564,\n", + " 2282,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 2004,\n", + " 1996,\n", + " 4845,\n", + " 1010,\n", + " 1045,\n", + " 3866,\n", + " 2000,\n", + " 3422,\n", + " 13941,\n", + " 1989,\n", + " 2021,\n", + " 2053,\n", + " 3043,\n", + " 2129,\n", + " 2116,\n", + " 2335,\n", + " 1045,\n", + " 2356,\n", + " 2000,\n", + " 3666,\n", + " 2068,\n", + " 1989,\n", + " 2026,\n", + " 3008,\n", + " 2052,\n", + " 2025,\n", + " 2000,\n", + " 2292,\n", + " 2033,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 2027,\n", + " 2052,\n", + " 2360,\n", + " 2000,\n", + " 2149,\n", + " 2008,\n", + " 2652,\n", + " 4003,\n", + " 2399,\n", + " 2052,\n", + " 2393,\n", + " 2026,\n", + " 4167,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 2145,\n", + " 1045,\n", + " 15175,\n", + " 2000,\n", + " 2377,\n", + " 1996,\n", + " 2399,\n", + " 2005,\n", + " 2068,\n", + " 2823,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 1045,\n", + " 2134,\n", + " 1005,\n", + " 1056,\n", + " 5382,\n", + " 2129,\n", + " 2157,\n", + " 2026,\n", + " 3008,\n", + " 2024,\n", + " 2127,\n", + " 1045,\n", + " 3133,\n", + " 2152,\n", + " 2082,\n", + " 1012,\n", + " 102],\n", + " [101,\n", + " 1996,\n", + " 2399,\n", + " 2026,\n", + " 3008,\n", + " 4036,\n", + " 2033,\n", + " 2073,\n", + " 1045,\n", + " 2001,\n", + " 1037,\n", + " 2775,\n", + " 2357,\n", + " 2041,\n", + " 2000,\n", + " 2022,\n", + " 2200,\n", + " 6179,\n", + " 2101,\n", + " 1999,\n", + " 2026,\n", + " 2166,\n", + " 1012,\n", + " 102],\n", + " [101, 1012, 102]],\n", + " [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n", + " [0, 0, 0]],\n", + " [[0, 0],\n", + " [0, 1],\n", + " [0, 2],\n", + " [0, 3],\n", + " [0, 4],\n", + " [0, 5],\n", + " [0, 6],\n", + " [0, 7],\n", + " [0, 8],\n", + " [0, 9],\n", + " [0, 10],\n", + " [0, 11],\n", + " [0, 12],\n", + " [0, 13],\n", + " [0, 14],\n", + " [0, 15],\n", + " [1, 1],\n", + " [1, 2],\n", + " [1, 3],\n", + " [1, 4],\n", + " [1, 5],\n", + " [1, 6],\n", + " [1, 7],\n", + " [1, 8],\n", + " [1, 9],\n", + " [1, 10],\n", + " [1, 11],\n", + " [1, 
12],\n", + " [1, 13],\n", + " [1, 14],\n", + " [1, 15],\n", + " [1, 16],\n", + " [2, 1],\n", + " [2, 2],\n", + " [2, 3],\n", + " [2, 4],\n", + " [2, 5],\n", + " [2, 6],\n", + " [2, 7],\n", + " [2, 8],\n", + " [2, 9],\n", + " [2, 10],\n", + " [2, 11],\n", + " [2, 12],\n", + " [2, 13],\n", + " [2, 14],\n", + " [2, 15],\n", + " [2, 16],\n", + " [2, 17],\n", + " [2, 18],\n", + " [2, 19],\n", + " [2, 20],\n", + " [2, 21],\n", + " [2, 22],\n", + " [2, 23],\n", + " [2, 24],\n", + " [2, 25],\n", + " [2, 26],\n", + " [2, 27],\n", + " [2, 28],\n", + " [2, 29],\n", + " [2, 30],\n", + " [3, 1],\n", + " [3, 2],\n", + " [3, 3],\n", + " [3, 4],\n", + " [3, 5],\n", + " [3, 6],\n", + " [3, 7],\n", + " [3, 8],\n", + " [3, 9],\n", + " [3, 10],\n", + " [3, 11],\n", + " [3, 12],\n", + " [3, 13],\n", + " [3, 14],\n", + " [4, 1],\n", + " [4, 2],\n", + " [4, 3],\n", + " [4, 4],\n", + " [4, 5],\n", + " [4, 6],\n", + " [4, 7],\n", + " [4, 8],\n", + " [4, 9],\n", + " [4, 10],\n", + " [4, 11],\n", + " [5, 1],\n", + " [5, 2],\n", + " [5, 3],\n", + " [5, 4],\n", + " [5, 5],\n", + " [5, 6],\n", + " [5, 7],\n", + " [5, 8],\n", + " [5, 9],\n", + " [5, 10],\n", + " [5, 11],\n", + " [5, 12],\n", + " [5, 13],\n", + " [5, 14],\n", + " [5, 15],\n", + " [5, 16],\n", + " [6, 1],\n", + " [6, 2],\n", + " [6, 3],\n", + " [6, 4],\n", + " [6, 5],\n", + " [6, 6],\n", + " [6, 7],\n", + " [6, 8],\n", + " [6, 9],\n", + " [6, 10],\n", + " [6, 11],\n", + " [6, 12],\n", + " [6, 13],\n", + " [6, 14],\n", + " [6, 15],\n", + " [6, 16],\n", + " [6, 17],\n", + " [6, 18],\n", + " [6, 19],\n", + " [6, 20],\n", + " [6, 21],\n", + " [6, 22]],\n", + " [\"When I was little, Friday's night was our family game night.\",\n", + " ' After supper, we would play card games of all sort in the sitting room.',\n", + " ' As the kid, I loved to watch cartoons,but no matter how many times I asked to watching them, my parents would not to let me.',\n", + " ' They would say to us that playing card games would help my brain.',\n", + " ' Still I unwilling to play the games for them sometimes.',\n", + " \" I didn't realize how right my parents are until I entered high school.\",\n", + " ' The games my parents taught me where I was a child turned out to be very useful later in my life.',\n", + " '.'],\n", + " [101,\n", + " 2043,\n", + " 1045,\n", + " 2001,\n", + " 2210,\n", + " 1010,\n", + " 5958,\n", + " 1005,\n", + " 1055,\n", + " 2305,\n", + " 2001,\n", + " 2256,\n", + " 2155,\n", + " 2208,\n", + " 2305,\n", + " 1012,\n", + " 2044,\n", + " 15264,\n", + " 1010,\n", + " 2057,\n", + " 2052,\n", + " 2377,\n", + " 4003,\n", + " 2399,\n", + " 1997,\n", + " 2035,\n", + " 4066,\n", + " 1999,\n", + " 1996,\n", + " 3564,\n", + " 2282,\n", + " 1012,\n", + " 2004,\n", + " 1996,\n", + " 4845,\n", + " 1010,\n", + " 1045,\n", + " 3866,\n", + " 2000,\n", + " 3422,\n", + " 13941,\n", + " 1989,\n", + " 2021,\n", + " 2053,\n", + " 3043,\n", + " 2129,\n", + " 2116,\n", + " 2335,\n", + " 1045,\n", + " 2356,\n", + " 2000,\n", + " 3666,\n", + " 2068,\n", + " 1989,\n", + " 2026,\n", + " 3008,\n", + " 2052,\n", + " 2025,\n", + " 2000,\n", + " 2292,\n", + " 2033,\n", + " 1012,\n", + " 2027,\n", + " 2052,\n", + " 2360,\n", + " 2000,\n", + " 2149,\n", + " 2008,\n", + " 2652,\n", + " 4003,\n", + " 2399,\n", + " 2052,\n", + " 2393,\n", + " 2026,\n", + " 4167,\n", + " 1012,\n", + " 2145,\n", + " 1045,\n", + " 15175,\n", + " 2000,\n", + " 2377,\n", + " 1996,\n", + " 2399,\n", + " 2005,\n", + " 2068,\n", + " 2823,\n", + " 1012,\n", + " 1045,\n", + " 2134,\n", + " 1005,\n", + " 1056,\n", + " 
5382,\n", + " 2129,\n", + " 2157,\n", + " 2026,\n", + " 3008,\n", + " 2024,\n", + " 2127,\n", + " 1045,\n", + " 3133,\n", + " 2152,\n", + " 2082,\n", + " 1012,\n", + " 1996,\n", + " 2399,\n", + " 2026,\n", + " 3008,\n", + " 4036,\n", + " 2033,\n", + " 2073,\n", + " 1045,\n", + " 2001,\n", + " 1037,\n", + " 2775,\n", + " 2357,\n", + " 2041,\n", + " 2000,\n", + " 2022,\n", + " 2200,\n", + " 6179,\n", + " 2101,\n", + " 1999,\n", + " 2026,\n", + " 2166,\n", + " 1012,\n", + " 102],\n", + " [0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0,\n", + " 0])" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids\n", + "\n", + "def analyze_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=20):\n", + " step = 15\n", + " #print(text[0])\n", + " global input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids\n", + " input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = process_text(text[0])\n", + "\n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, batches = copy_and_mask_feature(features[0],step, masked_tokens=masked_tokens)\n", + " #print(len(features))\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) #把input_ids增加了一个维度,变成[n_features,sequence_len]\n", + " #这里的n_features实际上是句子有多少批训练\n", + "\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " input_ids = input_ids.to(device) #拿去GPU\n", + " input_type_ids = input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(input_ids, input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1) #最后一维,也就是vocab 换算成概率和为百分之百\n", + " #print(mlm_probs.size())#这里实验的是torch.Size([5, 5, 30522])\n", + " tokens = features[0].tokens #为了输出,[mask]在input_ids里面表示出来,features的token都一样\n", + " 
#print(tokens)\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size() #三个维度分别是batch_size, sequence_length, vocab_size\n", + " assert bsz == len(batches)\n", + " # reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size)\n", + " # for i in range(seq_len):\n", + " # reduced_mlm_probs[0, i] = mlm_probs[i, i]\n", + " reduced_mlm_probs = torch.Tensor(1, len(tokens), vocab_size)\n", + " for i in batches:\n", + " pos = i\n", + " while pos < len(tokens):\n", + " reduced_mlm_probs[0, pos] = mlm_probs[i, pos]\n", + " pos = pos + step\n", + " mlm_probs = reduced_mlm_probs #压缩一下大小,节约不必要浪费的空间(只需要第i个batch里面[mask]位置的词汇表概率即可)\n", + " #tokens = [tokens[i] for i in masked_positions]\n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs) #传入的probs是二维的\n", + " #print(top_pairs) #******************************\n", + " if not given_mask:\n", + " show_abnormals(tokens, mlm_probs[0], show_suggestions=show_suggestions)\n", + " #return top_pairs\n" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "03/21/2019 18:19:51 - INFO - examples.extract_features - tokens: [CLS] when i was little , friday ' s night was our family game night . after supper , we would play card games of all sort in the sitting room . as the kid , i loved to watch cartoons , but no matter how many times i asked to watching them , my parents would not to let me . they would say to us that playing card games would help my brain . still i unwilling to play the games for them sometimes . i didn ' t realize how right my parents are until i entered high school . the games my parents taught me where i was a child turned out to be very useful later in my life . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 0 | [CLS] \t 2 | . 1 | the 1 | ) 1 | \" 1 | , \n", + " 97 | when \t* 97 | when 2 | since 1 | until 0 | while 0 | before \n", + " 99 | i \t* 99 | i 0 | she 0 | he 0 | we 0 | me \n", + " 100 | was \t*100 | was 0 | were 0 | got 0 | turned 0 | is \n", + " 11 | little \t 19 | younger * 11 | little 8 | eight 7 | young 7 | twelve \n", + " 51 | , \t* 51 | , 24 | . 4 | and 1 | of 1 | the \n", + " 0 | friday \t 33 | valentine 16 | mother 7 | children 7 | father 5 | grandma \n", + " 100 | ' \t*100 | ' 0 | ` 0 | ′ 0 | \" 0 | * \n", + " 100 | s \t*100 | s 0 | til 0 | n 0 | d 0 | round \n", + " 39 | night \t* 39 | night 16 | dinner 6 | eve 5 | day 5 | supper \n", + " 90 | was \t* 90 | was 8 | became 1 | is 0 | were 0 | , \n", + " 4 | our \t 79 | a 13 | the * 4 | our 1 | my 0 | their \n", + " 1 | family \t 59 | favorite 18 | first 3 | favourite 2 | only 1 | last \n", + " 3 | game \t 12 | dinner 7 | christmas 6 | fun 5 | day * 3 | game \n", + " 81 | night \t* 81 | night 13 | day 1 | dinner 1 | date 0 | nights \n", + " 97 | . \t* 97 | . 2 | and 0 | ; 0 | , 0 | ! 
\n", + " 80 | after \t* 80 | after 6 | during 4 | at 4 | over 3 | before \n", + " 1 | supper \t 68 | school 10 | dinner 9 | that 2 | midnight 1 | breakfast \n", + " 100 | , \t*100 | , 0 | ##time 0 | together 0 | time 0 | dinner \n", + " 98 | we \t* 98 | we 1 | i 0 | they 0 | everyone 0 | people \n", + " 64 | would \t* 64 | would 21 | could 2 | will 2 | can 1 | did \n", + " 96 | play \t* 96 | play 2 | have 1 | watch 0 | enjoy 0 | played \n", + " 97 | card \t* 97 | card 1 | board 1 | cards 0 | video 0 | computer \n", + " 100 | games \t*100 | games 0 | game 0 | ##games 0 | matches 0 | sports \n", + " 99 | of \t* 99 | of 0 | in 0 | with 0 | and 0 | , \n", + " 0 | all \t 85 | some 6 | any 3 | a 3 | every 1 | this \n", + " 2 | sort \t 41 | kinds 34 | types 5 | sorts 4 | sizes 3 | kind \n", + " 98 | in \t* 98 | in 0 | around 0 | inside 0 | at 0 | from \n", + " 73 | the \t* 73 | the 23 | our 1 | my 1 | a 1 | their \n", + " 0 | sitting \t 56 | family 17 | dining 14 | living 1 | back 1 | same \n", + " 99 | room \t* 99 | room 0 | area 0 | rooms 0 | hall 0 | areas \n", + " 99 | . \t* 99 | . 1 | and 0 | ; 0 | , 0 | ... \n", + " 48 | as \t* 48 | as 29 | like 8 | being 3 | unlike 3 | for \n", + " 0 | the \t 100 | a 0 | another 0 | an * 0 | the 0 | one \n", + " 8 | kid \t 43 | child 22 | youngest * 8 | kid 4 | baby 3 | oldest \n", + " 63 | , \t* 63 | , 5 | i 2 | . 1 | myself 1 | and \n", + " 99 | i \t* 99 | i 0 | we 0 | he 0 | she 0 | me \n", + " 15 | loved \t 36 | wanted 21 | used * 15 | loved 11 | liked 4 | tried \n", + " 100 | to \t*100 | to 0 | and 0 | playing 0 | watching 0 | going \n", + " 99 | watch \t* 99 | watch 1 | see 0 | play 0 | watching 0 | watched \n", + " 0 | cartoons \t 52 | them 41 | games 1 | movies 1 | cards 1 | it \n", + " 0 | , \t 81 | , 19 | . 0 | ; 0 | - 0 | ... \n", + " 44 | but \t 47 | and * 44 | but 6 | so 1 | yet 1 | because \n", + " 100 | no \t*100 | no 0 | little 0 | the 0 | zero 0 | not \n", + " 100 | matter \t*100 | matter 0 | to 0 | telling 0 | idea 0 | , \n", + " 100 | how \t*100 | how 0 | what 0 | however 0 | the 0 | where \n", + " 100 | many \t*100 | many 0 | often 0 | few 0 | several 0 | numerous \n", + " 85 | times \t* 85 | times 3 | questions 1 | minutes 1 | hours 1 | people \n", + " 82 | i \t* 82 | i 10 | we 1 | was 1 | being 1 | he \n", + " 0 | asked \t 37 | took 19 | went 13 | admitted 6 | got 4 | confessed \n", + " 5 | to \t 23 | for 13 | about * 5 | to 4 | myself 3 | me \n", + " 0 | watching \t 64 | play 30 | watch 3 | see 1 | join 0 | read \n", + " 57 | them \t* 57 | them 23 | cartoons 5 | it 2 | movies 2 | games \n", + " 0 | , \t 100 | , 0 | . 0 | ... 0 | again 0 | even \n", + " 99 | my \t* 99 | my 0 | the 0 | her 0 | his 0 | our \n", + " 98 | parents \t* 98 | parents 0 | family 0 | father 0 | mother 0 | grandparents\n", + " 0 | would \t 47 | decided 18 | chose 8 | tried 4 | seemed 4 | knew \n", + " 0 | not \t 70 | refuse 10 | have 5 | agree 2 | want 1 | promise \n", + " 0 | to \t 45 | always 17 | have 11 | really 9 | even 3 | ever \n", + " 28 | let \t* 28 | let 17 | believe 11 | tell 11 | bother 5 | stop \n", + " 91 | me \t* 91 | me 6 | go 1 | up 1 | on 0 | it \n", + " 97 | . \t* 97 | . 
1 | and 1 | ; 0 | because 0 | , \n", + " 94 | they \t* 94 | they 1 | he 1 | she 1 | dad 1 | i \n", + " 97 | would \t* 97 | would 1 | did 1 | always 1 | could 0 | might \n", + " 27 | say \t* 27 | say 21 | prove 16 | explain 4 | swear 3 | lie \n", + " 65 | to \t* 65 | to 6 | about 2 | for 2 | that 2 | in \n", + " 0 | us \t 99 | me 0 | themselves 0 | myself * 0 | us 0 | him \n", + " 94 | that \t* 94 | that 3 | how 1 | if 1 | , 0 | maybe \n", + " 89 | playing \t* 89 | playing 4 | the 4 | watching 0 | doing 0 | their \n", + " 46 | card \t* 46 | card 40 | the 4 | these 3 | those 1 | cards \n", + " 99 | games \t* 99 | games 0 | game 0 | together 0 | tricks 0 | again \n", + " 68 | would \t* 68 | would 14 | could 4 | might 4 | will 4 | did \n", + " 5 | help \t 23 | change 6 | use * 5 | help 3 | drain 3 | control \n", + " 61 | my \t* 61 | my 22 | the 9 | our 3 | your 3 | their \n", + " 1 | brain \t 15 | life 5 | family 4 | dad 3 | future 3 | parents \n", + " 57 | . \t* 57 | . 16 | and 14 | , 9 | but 1 | ; \n", + " 0 | still \t 35 | am 26 | was 8 | but 7 | is 5 | and \n", + " 6 | i \t 60 | , * 6 | i 3 | . 3 | too 3 | ... \n", + " 0 | unwilling \t 8 | want 8 | used 8 | have 8 | wanted 7 | had \n", + " 48 | to \t 50 | ##ly * 48 | to 0 | always 0 | t 0 | ##tly \n", + " 28 | play \t* 28 | play 6 | do 5 | make 1 | stop 1 | keep \n", + " 5 | the \t 82 | card * 5 | the 1 | these 1 | cards 1 | those \n", + " 59 | games \t* 59 | games 37 | game 1 | cards 0 | piano 0 | kids \n", + " 1 | for \t 92 | with * 1 | for 1 | in 1 | against 1 | without \n", + " 22 | them \t 51 | myself * 22 | them 9 | fun 1 | hours 1 | real \n", + " 2 | sometimes \t 16 | anyway 12 | anymore 11 | too 10 | all 4 | though \n", + " 96 | . \t* 96 | . 1 | because 1 | and 1 | ; 0 | , \n", + " 99 | i \t* 99 | i 0 | we 0 | they 0 | you 0 | people \n", + " 99 | didn \t* 99 | didn 0 | wouldn 0 | don 0 | couldn 0 | did \n", + " 100 | ' \t*100 | ' 0 | ` 0 | \" 0 | , 0 | ′ \n", + " 100 | t \t*100 | t 0 | m 0 | s 0 | d 0 | no \n", + " 45 | realize \t 46 | know * 45 | realize 3 | understand 3 | realise 2 | see \n", + " 100 | how \t*100 | how 0 | what 0 | the 0 | it 0 | however \n", + " 0 | right \t 6 | strict 5 | powerful 4 | wonderful 4 | smart 4 | helpful \n", + " 97 | my \t* 97 | my 1 | our 0 | the 0 | your 0 | their \n", + " 29 | parents \t* 29 | parents 6 | thoughts 4 | words 2 | people 2 | kids \n", + " 1 | are \t 97 | were * 1 | are 0 | thought 0 | felt 0 | was \n", + " 87 | until \t* 87 | until 9 | when 2 | before 1 | till 0 | once \n", + " 100 | i \t*100 | i 0 | we 0 | they 0 | he 0 | me \n", + " 13 | entered \t 54 | graduated * 13 | entered 9 | finished 7 | started 6 | left \n", + " 51 | high \t* 51 | high 20 | elementary 15 | middle 8 | grade 1 | primary \n", + " 100 | school \t*100 | school 0 | schools 0 | society 0 | college 0 | class \n", + " 81 | . \t* 81 | . 
14 | and 2 | but 1 | , 1 | ; \n", + " 92 | the \t* 92 | the 5 | card 1 | those 0 | playing 0 | these \n", + " 43 | games \t* 43 | games 30 | game 6 | lessons 4 | rules 1 | math \n", + " 100 | my \t*100 | my 0 | our 0 | his 0 | me 0 | that \n", + " 53 | parents \t* 53 | parents 15 | father 13 | mother 5 | dad 4 | grandparents\n", + " 56 | taught \t* 56 | taught 20 | showed 12 | played 8 | gave 1 | told \n", + " 100 | me \t*100 | me 0 | us 0 | i 0 | him 0 | my \n", + " 0 | where \t 96 | when 2 | since 2 | while 0 | as 0 | until \n", + " 99 | i \t* 99 | i 0 | me 0 | he 0 | she 0 | my \n", + " 99 | was \t* 99 | was 0 | were 0 | as 0 | became 0 | had \n", + " 100 | a \t*100 | a 0 | the 0 | and 0 | one 0 | still \n", + " 22 | child \t 51 | kid * 22 | child 7 | boy 4 | teenager 4 | freshman \n", + " 97 | turned \t* 97 | turned 1 | turn 1 | came 0 | grew 0 | turning \n", + " 100 | out \t*100 | out 0 | into 0 | on 0 | up 0 | proving \n", + " 100 | to \t*100 | to 0 | into 0 | and 0 | not 0 | would \n", + " 94 | be \t* 94 | be 3 | become 3 | prove 0 | get 0 | seem \n", + " 69 | very \t* 69 | very 6 | extremely 5 | quite 3 | more 3 | really \n", + " 7 | useful \t 19 | important 14 | different 9 | helpful * 7 | useful 6 | influential \n", + " 6 | later \t 46 | things 17 | early * 6 | later 3 | lessons 3 | times \n", + " 100 | in \t*100 | in 0 | on 0 | during 0 | into 0 | than \n", + " 100 | my \t*100 | my 0 | our 0 | his 0 | their 0 | the \n", + " 99 | life \t* 99 | life 1 | career 0 | childhood 0 | education 0 | lives \n", + " 100 | . \t*100 | . 0 | ; 0 | ! 0 | ? 0 | ... \n", + " 0 | [SEP] \t 25 | \" 3 | for 3 | now 3 | and 2 | so \n", + "\n", + "*******************************************************************************************************************\n", + "1\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mwhen \u001b[0m\n", + "*******************************************************************************************************************\n", + "2\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "3\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "4\n", + "0.4996413875309904\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mlittle\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "5\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "6\n", + "5.037531860577574\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mfriday\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "7\n", + "0.0\n", + "POS\n", + "\u001b[38;5;15m\u001b[48;5;0m' \u001b[0m\n", + "*******************************************************************************************************************\n", + "8\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0ms \u001b[0m\n", + "*******************************************************************************************************************\n", + "9\n", + "0.0\n", + "NN\n", + 
"\u001b[38;5;15m\u001b[48;5;0mnight \u001b[0m\n", + "*******************************************************************************************************************\n", + "10\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "11\n", + "2.9288282257051295\n", + "PRP$\n", + "\u001b[38;5;226m\u001b[48;5;0mour\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "12\n", + "3.944041330267972\n", + "NN\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;226m\u001b[48;5;0mfamily\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "13\n", + "1.2859363936756965\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mgame\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "14\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mnight \u001b[0m\n", + "*******************************************************************************************************************\n", + "15\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "16\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mafter \u001b[0m\n", + "*******************************************************************************************************************\n", + "17\n", + "3.864973993379616\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0msupper\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "18\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "19\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mwe \u001b[0m\n", + "*******************************************************************************************************************\n", + "20\n", + "0.0\n", + "MD\n", + "\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "21\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mplay \u001b[0m\n", + "*******************************************************************************************************************\n", + "22\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mcard\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "23\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "24\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mof \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "25\n", + "6.181150402503261\n", + "DT\n", + "\u001b[38;5;226m\u001b[48;5;0mall\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "26\n", + "3.2442493513478983\n", + "NN\n", + "\u001b[38;5;214m\u001b[48;5;0msort\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/sorts \u001b[0m\n", + "*******************************************************************************************************************\n", + "27\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0min \u001b[0m\n", + "*******************************************************************************************************************\n", + "28\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\n", + "*******************************************************************************************************************\n", + "29\n", + "5.1759204333264215\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0msitting\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "30\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mroom \u001b[0m\n", + "*******************************************************************************************************************\n", + "31\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "32\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mas \u001b[0m\n", + "*******************************************************************************************************************\n", + "33\n", + "11.548374205660924\n", + "DT\n", + "\u001b[38;5;196m\u001b[48;5;0mthe\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/a \u001b[0m\n", + "*******************************************************************************************************************\n", + "34\n", + "1.7249087614151182\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mkid\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "35\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "36\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "37\n", + "0.9132128054919955\n", + "VBD\n", + "\u001b[38;5;226m\u001b[48;5;0mloved\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "38\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "39\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mwatch \u001b[0m\n", + "*******************************************************************************************************************\n", + "40\n", + "6.495473375871382\n", + 
"NNS\n", + "\u001b[38;5;226m\u001b[48;5;0mcartoons\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "41\n", + "15.452660592340097\n", + "VB\n", + "\u001b[38;5;196m\u001b[48;5;0m,\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/, \u001b[0m\n", + "*******************************************************************************************************************\n", + "42\n", + "0.053008093757376584\n", + "CC\n", + "\u001b[38;5;226m\u001b[48;5;0mbut\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "43\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mno \u001b[0m\n", + "*******************************************************************************************************************\n", + "44\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mmatter \u001b[0m\n", + "*******************************************************************************************************************\n", + "45\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mhow \u001b[0m\n", + "*******************************************************************************************************************\n", + "46\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mmany\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "47\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mtimes \u001b[0m\n", + "*******************************************************************************************************************\n", + "48\n", + "0.0\n", + "VBP\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "49\n", + "10.646676048338493\n", + "VBN\n", + "\u001b[38;5;196m\u001b[48;5;0masked\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/was used \u001b[0m\n", + "*******************************************************************************************************************\n", + "50\n", + "1.5529499099577042\n", + "TO\n", + "\u001b[38;5;226m\u001b[48;5;0mto\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "51\n", + "8.95932484381705\n", + "VBG\n", + "\u001b[38;5;214m\u001b[48;5;0mwatching\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/watch \u001b[0m\n", + "*******************************************************************************************************************\n", + "52\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mthem \u001b[0m\n", + "*******************************************************************************************************************\n", + "53\n", + "18.383069999315744\n", + "VB\n", + "\u001b[38;5;196m\u001b[48;5;0m,\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/, \u001b[0m\n", + "*******************************************************************************************************************\n", + "54\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "55\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents 
\u001b[0m\n", + "*******************************************************************************************************************\n", + "56\n", + "5.4762173007041035\n", + "MD\n", + "检查点1*****************************************************\n", + "\u001b[38;5;226m\u001b[48;5;0mwould\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "57\n", + "5.3354081649787535\n", + "RB\n", + "\u001b[38;5;214m\u001b[48;5;0mnot\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/refuse \u001b[0m\n", + "*******************************************************************************************************************\n", + "58\n", + "5.981459151215268\n", + "TO\n", + "\u001b[38;5;214m\u001b[48;5;0mto\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/去掉 to \u001b[0m\n", + "*******************************************************************************************************************\n", + "59\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mlet \u001b[0m\n", + "*******************************************************************************************************************\n", + "60\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mme \u001b[0m\n", + "*******************************************************************************************************************\n", + "61\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "62\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mthey \u001b[0m\n", + "*******************************************************************************************************************\n", + "63\n", + "0.0\n", + "MD\n", + "\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "64\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0msay \u001b[0m\n", + "*******************************************************************************************************************\n", + "65\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "66\n", + "6.581833896065917\n", + "PRP\n", + "\u001b[38;5;214m\u001b[48;5;0mus\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/me \u001b[0m\n", + "*******************************************************************************************************************\n", + "67\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mthat \u001b[0m\n", + "*******************************************************************************************************************\n", + "68\n", + "0.0\n", + "VBG\n", + "\u001b[38;5;15m\u001b[48;5;0mplaying \u001b[0m\n", + "*******************************************************************************************************************\n", + "69\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mcard \u001b[0m\n", + "*******************************************************************************************************************\n", + "70\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "71\n", + "0.0\n", + "MD\n", + 
"\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "72\n", + "1.4588194328350998\n", + "VB\n", + "\u001b[38;5;226m\u001b[48;5;0mhelp\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "73\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "74\n", + "3.173226871228209\n", + "NN\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;226m\u001b[48;5;0mbrain\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "75\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "76\n", + "5.90809919263306\n", + "RB\n", + "\u001b[38;5;214m\u001b[48;5;0mstill\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/still , \u001b[0m\n", + "*******************************************************************************************************************\n", + "77\n", + "2.2313680234481628\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mi\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "78\n", + "7.241924210620825\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0munwilling\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "79\n", + "0.033503519227476186\n", + "TO\n", + "\u001b[38;5;226m\u001b[48;5;0mto\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "80\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mplay \u001b[0m\n", + "*******************************************************************************************************************\n", + "81\n", + "2.7297515213736863\n", + "DT\n", + "\u001b[38;5;226m\u001b[48;5;0mthe\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "82\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "83\n", + "4.231741889869705\n", + "IN\n", + "\u001b[38;5;214m\u001b[48;5;0mfor\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/with \u001b[0m\n", + "*******************************************************************************************************************\n", + "84\n", + "0.8675317652760016\n", + "PRP\n", + "\u001b[38;5;226m\u001b[48;5;0mthem\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "85\n", + "2.1558967133083646\n", + "RB\n", + "\u001b[38;5;226m\u001b[48;5;0msometimes\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "86\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "87\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mi\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "88\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mdidn \u001b[0m\n", + "*******************************************************************************************************************\n", + "89\n", + "0.0\n", + "POS\n", + "\u001b[38;5;15m\u001b[48;5;0m' \u001b[0m\n", + "*******************************************************************************************************************\n", + "90\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mt \u001b[0m\n", + "*******************************************************************************************************************\n", + "91\n", + "0.011093191090367771\n", + "VB\n", + "\u001b[38;5;226m\u001b[48;5;0mrealize\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "92\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mhow \u001b[0m\n", + "*******************************************************************************************************************\n", + "93\n", + "3.6692828920487384\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mright\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "94\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "95\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents \u001b[0m\n", + "*******************************************************************************************************************\n", + "96\n", + "4.758635578869137\n", + "VBP\n", + "\u001b[38;5;214m\u001b[48;5;0mare\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/were \u001b[0m\n", + "*******************************************************************************************************************\n", + "97\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0muntil \u001b[0m\n", + "*******************************************************************************************************************\n", + "98\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "99\n", + "1.4610567542265707\n", + "VBD\n", + "\u001b[38;5;226m\u001b[48;5;0mentered\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "100\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mhigh\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "101\n", + "0.0\n", + "NN\n", + 
"\u001b[38;5;15m\u001b[48;5;0mschool \u001b[0m\n", + "*******************************************************************************************************************\n", + "102\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "103\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\n", + "*******************************************************************************************************************\n", + "104\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "105\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "106\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents \u001b[0m\n", + "*******************************************************************************************************************\n", + "107\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mtaught \u001b[0m\n", + "*******************************************************************************************************************\n", + "108\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mme \u001b[0m\n", + "*******************************************************************************************************************\n", + "109\n", + "9.636217093727145\n", + "WRB\n", + "\u001b[38;5;214m\u001b[48;5;0mwhere\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/when \u001b[0m\n", + "*******************************************************************************************************************\n", + "110\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "111\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "112\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0ma \u001b[0m\n", + "*******************************************************************************************************************\n", + "113\n", + "0.8537064036270944\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mchild\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "114\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mturned \u001b[0m\n", + "*******************************************************************************************************************\n", + "115\n", + "0.0\n", + "RP\n", + "\u001b[38;5;15m\u001b[48;5;0mout \u001b[0m\n", + "*******************************************************************************************************************\n", + "116\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "117\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mbe \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "118\n", + "0.0\n", + "RB\n", + "\u001b[38;5;15m\u001b[48;5;0mvery \u001b[0m\n", + "*******************************************************************************************************************\n", + "119\n", + "0.9869002719874604\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0museful\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "120\n", + "1.9653529850905183\n", + "RB\n", + "\u001b[38;5;226m\u001b[48;5;0mlater\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "121\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0min \u001b[0m\n", + "*******************************************************************************************************************\n", + "122\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "123\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mlife \u001b[0m\n", + "*******************************************************************************************************************\n", + "124\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "平均gap:1.4890399906268712\n", + "time cost 8.737523794174194 s\n" + ] + } + ], + "source": [ + "import time\n", + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "# text = [\"Last week I went to the theater. There are many person . Luckily , I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "\n", + "#text = [\"Last week I went to the theater. I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "# text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. 
At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "#text = [\"The journey was long and tired. We left London at five o'clock in the evening and spend eight hours in the train. We had been travelled for 3 hours after someone appeared selling food and drinks. It was darkness all the time we were crossing Wales, but we could see nothing through the windows. When we finally arrived Holyhead nearly , everyone was slept. As soon as the train stopped, everybody come to life, grabbing their suitcases and rushing onto the platform.\"]\n", + "text = [\"When I was little, Friday's night was our family game night. After supper, we would play card games of all sort in the sitting room. As the kid, I loved to watch cartoons,but no matter how many times I asked to watching them, my parents would not to let me. They would say to us that playing card games would help my brain. Still I unwilling to play the games for them sometimes. I didn't realize how right my parents are until I entered high school. The games my parents taught me where I was a child turned out to be very useful later in my life.\"]\n", + "#text = [\"Mr. and Mrs.Zhang all work in our school. They live far from the school, and it takes them about a hour and a half to go to work every day. In their spare time, they are interesting in planting vegetables in their garden, that is on the rooftop of their house. They often get up earlier and water the vegetables together. They have also bought in some gardening tools.beside, they often get some useful informations from the internet. When summer came, they will invite their students pick the vegetables!\"]\n", + "#text = ['The question is more easy than that.']\n", + "#text = [\"Last week I go to the zoo. I had a very good seat. The play was very interesting.\"]\n", + "#text =[\"Last week I went to the theater. I had very good seat. The play was very interesting.But I didn't enjoy it. A young man and a young woman were sitting behind me.They were talking loudly. 
I got very angry.\"]#因为外面有中括号,所以是二维的\n", + "time_start=time.time()\n", + "analyze_text(text, show_firstk_probs=200)\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s')" + ] + }, + { + "cell_type": "code", + "execution_count": 438, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "**********************************display_suggestions********************************************************\n", + "| suggestion : position in text\n", + "---------------------------------------------------------------------------------------\n", + "| 去掉前面 more 原位置改成 easier : 5\n", + "*************************************************************************************************************\n", + "['去掉前面', 'more', '原位置改成', 'easier']\n", + " the question is easier than that .\n" + ] + } + ], + "source": [ + "#print(suggestions)\n", + "def display_suggestion():\n", + " print(\"**********************************display_suggestions********************************************************\")\n", + " print(\"| {:50} : {}\".format(\"suggestion\",\"position in text\"))\n", + " print(\"---------------------------------------------------------------------------------------\")\n", + " for key in suggestions:\n", + " print(\"| {:<50} : {}\".format(suggestions[key] ,key))\n", + " print(\"*************************************************************************************************************\")\n", + "display_suggestion()\n", + "\n", + "def modify_text(index):\n", + " #entire_ids,entire_type_ids\n", + " entire_ids_copy = copy.deepcopy(entire_ids)\n", + " new_text = \"\"\n", + " suggestion = suggestions[index]\n", + " if suggestion[0:2] == '##':\n", + " suggestion = tokenizer.ids_to_tokens[entire_ids_copy[index - 1]] + suggestion[2:]\n", + " del entire_ids_copy[index]\n", + " index = index - 1\n", + " #print(suggestion)\n", + " suggestion_tokens = suggestion.split(\" \")\n", + " print(suggestion_tokens)\n", + " if '去掉前面' == suggestion_tokens[0]:\n", + " del entire_ids_copy[index - 1]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " index = index - 1\n", + " elif '去掉后面' == suggestion_tokens[0]:\n", + " del entire_ids_copy[index + 1]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " elif '去掉' == suggestion_tokens[0]:\n", + " del entire_ids_copy[index]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " if '原位置改成' in suggestion_tokens:\n", + " del suggestion_tokens[0]\n", + " \n", + " len_suggest = len(suggestion_tokens)\n", + " if len_suggest == 1:\n", + " entire_ids_copy[index] = tokenizer.vocab[suggestion_tokens[0]]\n", + " elif len_suggest == 2:\n", + " entire_ids_copy.insert(index,tokenizer.vocab[suggestion_tokens[0]])\n", + " entire_ids_copy[index + 1] = tokenizer.vocab[suggestion_tokens[1]]\n", + " \n", + " for i in range(1,len(entire_ids_copy)-1):\n", + " word = tokenizer.ids_to_tokens[entire_ids_copy[i]]\n", + " if word[0:2] == \"##\":\n", + " new_text = new_text + word[2:]\n", + " else:\n", + " new_text = new_text + ' ' + tokenizer.ids_to_tokens[entire_ids_copy[i]]\n", + " return new_text\n", + "\n", + "print(modify_text(5))" + ] + }, + { + "cell_type": "code", + "execution_count": 283, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "03/20/2019 15:48:16 - INFO - examples.extract_features - tokens: [CLS] when i was little , friday ' s night was our family game night . 
after supper , we would play card games of all sort in the sitting room . as the kid , i loved to watch cartoons , but no matter how many times i asked to watching them , my parents would not to let me . they would say to us that playing card games would help my brain . still i unwilling to play the games for them sometimes . i didn ' t realize how right my parents are until i entered high school . the games my parents taught me where i was a child turned out to be very useful later in my life . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['[CLS]', 'when', 'i', 'was', 'little', ',', 'friday', \"'\", 's', 'night', 'was', 'our', 'family', 'game', 'night', '.', 'after', 'supper', ',', 'we', 'would', 'play', 'card', 'games', 'of', 'all', 'sort', 'in', 'the', 'sitting', 'room', '.', 'as', 'the', 'kid', ',', 'i', 'loved', 'to', 'watch', 'cartoons', ',', 'but', 'no', 'matter', 'how', 'many', 'times', 'i', 'asked', 'to', 'watching', 'them', ',', 'my', 'parents', 'would', 'not', 'to', 'let', 'me', '.', 'they', 'would', 'say', 'to', 'us', 'that', 'playing', 'card', 'games', 'would', 'help', 'my', 'brain', '.', 'still', 'i', 'unwilling', 'to', 'play', 'the', 'games', 'for', 'them', 'sometimes', '.', 'i', 'didn', \"'\", 't', 'realize', 'how', 'right', 'my', 'parents', 'are', 'until', 'i', 'entered', 'high', 'school', '.', 'the', 'games', 'my', 'parents', 'taught', 'me', 'where', 'i', 'was', 'a', 'child', 'turned', 'out', 'to', 'be', 'very', 'useful', 'later', 'in', 'my', 'life', '.', '[SEP]']\n", + "********************************************************************\n", + " 0 | [CLS] \t 2 | . 1 | the 1 | ) 1 | \" 1 | , \n", + " 97 | when \t* 97 | when 2 | since 1 | until 0 | while 0 | before \n", + " 99 | i \t* 99 | i 0 | she 0 | he 0 | we 0 | me \n", + " 100 | was \t*100 | was 0 | were 0 | got 0 | turned 0 | is \n", + " 11 | little \t 19 | younger * 11 | little 8 | eight 7 | young 7 | twelve \n", + " 51 | , \t* 51 | , 24 | . 4 | and 1 | of 1 | the \n", + " 0 | friday \t 33 | valentine 16 | mother 7 | children 7 | father 5 | grandma \n", + " 100 | ' \t*100 | ' 0 | ` 0 | ′ 0 | \" 0 | * \n", + " 100 | s \t*100 | s 0 | til 0 | n 0 | d 0 | round \n", + " 39 | night \t* 39 | night 16 | dinner 6 | eve 5 | day 5 | supper \n", + " 90 | was \t* 90 | was 8 | became 1 | is 0 | were 0 | , \n", + " 4 | our \t 79 | a 13 | the * 4 | our 1 | my 0 | their \n", + " 1 | family \t 59 | favorite 18 | first 3 | favourite 2 | only 1 | last \n", + " 3 | game \t 12 | dinner 7 | christmas 6 | fun 5 | day * 3 | game \n", + " 81 | night \t* 81 | night 13 | day 1 | dinner 1 | date 0 | nights \n", + " 97 | . \t* 97 | . 2 | and 0 | ; 0 | , 0 | ! 
\n", + " 80 | after \t* 80 | after 6 | during 4 | at 4 | over 3 | before \n", + " 1 | supper \t 68 | school 10 | dinner 9 | that 2 | midnight 1 | breakfast \n", + " 100 | , \t*100 | , 0 | ##time 0 | together 0 | time 0 | dinner \n", + " 98 | we \t* 98 | we 1 | i 0 | they 0 | everyone 0 | people \n", + " 64 | would \t* 64 | would 21 | could 2 | will 2 | can 1 | did \n", + " 96 | play \t* 96 | play 2 | have 1 | watch 0 | enjoy 0 | played \n", + " 97 | card \t* 97 | card 1 | board 1 | cards 0 | video 0 | computer \n", + " 100 | games \t*100 | games 0 | game 0 | ##games 0 | matches 0 | sports \n", + " 99 | of \t* 99 | of 0 | in 0 | with 0 | and 0 | , \n", + " 0 | all \t 85 | some 6 | any 3 | a 3 | every 1 | this \n", + " 2 | sort \t 41 | kinds 34 | types 5 | sorts 4 | sizes 3 | kind \n", + " 98 | in \t* 98 | in 0 | around 0 | inside 0 | at 0 | from \n", + " 73 | the \t* 73 | the 23 | our 1 | my 1 | a 1 | their \n", + " 0 | sitting \t 56 | family 17 | dining 14 | living 1 | back 1 | same \n", + " 99 | room \t* 99 | room 0 | area 0 | rooms 0 | hall 0 | areas \n", + " 99 | . \t* 99 | . 1 | and 0 | ; 0 | , 0 | ... \n", + " 48 | as \t* 48 | as 29 | like 8 | being 3 | unlike 3 | for \n", + " 0 | the \t 100 | a 0 | another 0 | an * 0 | the 0 | one \n", + " 8 | kid \t 43 | child 22 | youngest * 8 | kid 4 | baby 3 | oldest \n", + " 63 | , \t* 63 | , 5 | i 2 | . 1 | myself 1 | and \n", + " 99 | i \t* 99 | i 0 | we 0 | he 0 | she 0 | me \n", + " 15 | loved \t 36 | wanted 21 | used * 15 | loved 11 | liked 4 | tried \n", + " 100 | to \t*100 | to 0 | and 0 | playing 0 | watching 0 | going \n", + " 99 | watch \t* 99 | watch 1 | see 0 | play 0 | watching 0 | watched \n", + " 0 | cartoons \t 52 | them 41 | games 1 | movies 1 | cards 1 | it \n", + " 0 | , \t 81 | , 19 | . 0 | ; 0 | - 0 | ... \n", + " 44 | but \t 47 | and * 44 | but 6 | so 1 | yet 1 | because \n", + " 100 | no \t*100 | no 0 | little 0 | the 0 | zero 0 | not \n", + " 100 | matter \t*100 | matter 0 | to 0 | telling 0 | idea 0 | , \n", + " 100 | how \t*100 | how 0 | what 0 | however 0 | the 0 | where \n", + " 100 | many \t*100 | many 0 | often 0 | few 0 | several 0 | numerous \n", + " 85 | times \t* 85 | times 3 | questions 1 | minutes 1 | hours 1 | people \n", + " 82 | i \t* 82 | i 10 | we 1 | was 1 | being 1 | he \n", + " 0 | asked \t 37 | took 19 | went 13 | admitted 6 | got 4 | confessed \n", + " 5 | to \t 23 | for 13 | about * 5 | to 4 | myself 3 | me \n", + " 0 | watching \t 64 | play 30 | watch 3 | see 1 | join 0 | read \n", + " 57 | them \t* 57 | them 23 | cartoons 5 | it 2 | movies 2 | games \n", + " 0 | , \t 100 | , 0 | . 0 | ... 0 | again 0 | even \n", + " 99 | my \t* 99 | my 0 | the 0 | her 0 | his 0 | our \n", + " 98 | parents \t* 98 | parents 0 | family 0 | father 0 | mother 0 | grandparents\n", + " 0 | would \t 47 | decided 18 | chose 8 | tried 4 | seemed 4 | knew \n", + " 0 | not \t 70 | refuse 10 | have 5 | agree 2 | want 1 | promise \n", + " 0 | to \t 45 | always 17 | have 11 | really 9 | even 3 | ever \n", + " 28 | let \t* 28 | let 17 | believe 11 | tell 11 | bother 5 | stop \n", + " 91 | me \t* 91 | me 6 | go 1 | up 1 | on 0 | it \n", + " 97 | . \t* 97 | . 
1 | and 1 | ; 0 | because 0 | , \n", + " 94 | they \t* 94 | they 1 | he 1 | she 1 | dad 1 | i \n", + " 97 | would \t* 97 | would 1 | did 1 | always 1 | could 0 | might \n", + " 27 | say \t* 27 | say 21 | prove 16 | explain 4 | swear 3 | lie \n", + " 65 | to \t* 65 | to 6 | about 2 | for 2 | that 2 | in \n", + " 0 | us \t 99 | me 0 | themselves 0 | myself * 0 | us 0 | him \n", + " 94 | that \t* 94 | that 3 | how 1 | if 1 | , 0 | maybe \n", + " 89 | playing \t* 89 | playing 4 | the 4 | watching 0 | doing 0 | their \n", + " 46 | card \t* 46 | card 40 | the 4 | these 3 | those 1 | cards \n", + " 99 | games \t* 99 | games 0 | game 0 | together 0 | tricks 0 | again \n", + " 68 | would \t* 68 | would 14 | could 4 | might 4 | will 4 | did \n", + " 5 | help \t 23 | change 6 | use * 5 | help 3 | drain 3 | control \n", + " 61 | my \t* 61 | my 22 | the 9 | our 3 | your 3 | their \n", + " 1 | brain \t 15 | life 5 | family 4 | dad 3 | future 3 | parents \n", + " 57 | . \t* 57 | . 16 | and 14 | , 9 | but 1 | ; \n", + " 0 | still \t 35 | am 26 | was 8 | but 7 | is 5 | and \n", + " 6 | i \t 60 | , * 6 | i 3 | . 3 | too 3 | ... \n", + " 0 | unwilling \t 8 | want 8 | used 8 | have 8 | wanted 7 | had \n", + " 48 | to \t 50 | ##ly * 48 | to 0 | always 0 | t 0 | ##tly \n", + " 28 | play \t* 28 | play 6 | do 5 | make 1 | stop 1 | keep \n", + " 5 | the \t 82 | card * 5 | the 1 | these 1 | cards 1 | those \n", + " 59 | games \t* 59 | games 37 | game 1 | cards 0 | piano 0 | kids \n", + " 1 | for \t 92 | with * 1 | for 1 | in 1 | against 1 | without \n", + " 22 | them \t 51 | myself * 22 | them 9 | fun 1 | hours 1 | real \n", + " 2 | sometimes \t 16 | anyway 12 | anymore 11 | too 10 | all 4 | though \n", + " 96 | . \t* 96 | . 1 | because 1 | and 1 | ; 0 | , \n", + " 99 | i \t* 99 | i 0 | we 0 | they 0 | you 0 | people \n", + " 99 | didn \t* 99 | didn 0 | wouldn 0 | don 0 | couldn 0 | did \n", + " 100 | ' \t*100 | ' 0 | ` 0 | \" 0 | , 0 | ′ \n", + " 100 | t \t*100 | t 0 | m 0 | s 0 | d 0 | no \n", + " 45 | realize \t 46 | know * 45 | realize 3 | understand 3 | realise 2 | see \n", + " 100 | how \t*100 | how 0 | what 0 | the 0 | it 0 | however \n", + " 0 | right \t 6 | strict 5 | powerful 4 | wonderful 4 | smart 4 | helpful \n", + " 97 | my \t* 97 | my 1 | our 0 | the 0 | your 0 | their \n", + " 29 | parents \t* 29 | parents 6 | thoughts 4 | words 2 | people 2 | kids \n", + " 1 | are \t 97 | were * 1 | are 0 | thought 0 | felt 0 | was \n", + " 87 | until \t* 87 | until 9 | when 2 | before 1 | till 0 | once \n", + " 100 | i \t*100 | i 0 | we 0 | they 0 | he 0 | me \n", + " 13 | entered \t 54 | graduated * 13 | entered 9 | finished 7 | started 6 | left \n", + " 51 | high \t* 51 | high 20 | elementary 15 | middle 8 | grade 1 | primary \n", + " 100 | school \t*100 | school 0 | schools 0 | society 0 | college 0 | class \n", + " 81 | . \t* 81 | . 
14 | and 2 | but 1 | , 1 | ; \n", + " 92 | the \t* 92 | the 5 | card 1 | those 0 | playing 0 | these \n", + " 43 | games \t* 43 | games 30 | game 6 | lessons 4 | rules 1 | math \n", + " 100 | my \t*100 | my 0 | our 0 | his 0 | me 0 | that \n", + " 53 | parents \t* 53 | parents 15 | father 13 | mother 5 | dad 4 | grandparents\n", + " 56 | taught \t* 56 | taught 20 | showed 12 | played 8 | gave 1 | told \n", + " 100 | me \t*100 | me 0 | us 0 | i 0 | him 0 | my \n", + " 0 | where \t 96 | when 2 | since 2 | while 0 | as 0 | until \n", + " 99 | i \t* 99 | i 0 | me 0 | he 0 | she 0 | my \n", + " 99 | was \t* 99 | was 0 | were 0 | as 0 | became 0 | had \n", + " 100 | a \t*100 | a 0 | the 0 | and 0 | one 0 | still \n", + " 22 | child \t 51 | kid * 22 | child 7 | boy 4 | teenager 4 | freshman \n", + " 97 | turned \t* 97 | turned 1 | turn 1 | came 0 | grew 0 | turning \n", + " 100 | out \t*100 | out 0 | into 0 | on 0 | up 0 | proving \n", + " 100 | to \t*100 | to 0 | into 0 | and 0 | not 0 | would \n", + " 94 | be \t* 94 | be 3 | become 3 | prove 0 | get 0 | seem \n", + " 69 | very \t* 69 | very 6 | extremely 5 | quite 3 | more 3 | really \n", + " 7 | useful \t 19 | important 14 | different 9 | helpful * 7 | useful 6 | influential \n", + " 6 | later \t 46 | things 17 | early * 6 | later 3 | lessons 3 | times \n", + " 100 | in \t*100 | in 0 | on 0 | during 0 | into 0 | than \n", + " 100 | my \t*100 | my 0 | our 0 | his 0 | their 0 | the \n", + " 99 | life \t* 99 | life 1 | career 0 | childhood 0 | education 0 | lives \n", + " 100 | . \t*100 | . 0 | ; 0 | ! 0 | ? 0 | ... \n", + " 0 | [SEP] \t 25 | \" 3 | for 3 | now 3 | and 2 | so \n", + "\n", + "*******************************************************************************************************************\n", + "1\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mwhen \u001b[0m\n", + "*******************************************************************************************************************\n", + "2\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "3\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "4\n", + "0.4996413875309904\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mlittle\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "5\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "6\n", + "5.037531860577574\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mfriday\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "7\n", + "0.0\n", + "POS\n", + "\u001b[38;5;15m\u001b[48;5;0m' \u001b[0m\n", + "*******************************************************************************************************************\n", + "8\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0ms \u001b[0m\n", + "*******************************************************************************************************************\n", + "9\n", + "0.0\n", + "NN\n", + 
"\u001b[38;5;15m\u001b[48;5;0mnight \u001b[0m\n", + "*******************************************************************************************************************\n", + "10\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "11\n", + "2.9288282257051295\n", + "PRP$\n", + "\u001b[38;5;226m\u001b[48;5;0mour\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "12\n", + "3.944041330267972\n", + "NN\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;226m\u001b[48;5;0mfamily\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "13\n", + "1.2859363936756965\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mgame\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "14\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mnight \u001b[0m\n", + "*******************************************************************************************************************\n", + "15\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "16\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mafter \u001b[0m\n", + "*******************************************************************************************************************\n", + "17\n", + "3.864973993379616\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0msupper\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "18\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "19\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mwe \u001b[0m\n", + "*******************************************************************************************************************\n", + "20\n", + "0.0\n", + "MD\n", + "\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "21\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mplay \u001b[0m\n", + "*******************************************************************************************************************\n", + "22\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mcard\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "23\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "24\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mof \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "25\n", + "6.181150402503261\n", + "DT\n", + "\u001b[38;5;226m\u001b[48;5;0mall\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "26\n", + "3.2442493513478983\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0msort\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "27\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0min \u001b[0m\n", + "*******************************************************************************************************************\n", + "28\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\n", + "*******************************************************************************************************************\n", + "29\n", + "5.1759204333264215\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0msitting\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "30\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mroom \u001b[0m\n", + "*******************************************************************************************************************\n", + "31\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "32\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mas \u001b[0m\n", + "*******************************************************************************************************************\n", + "33\n", + "11.548374205660924\n", + "DT\n", + "\u001b[38;5;196m\u001b[48;5;0mthe\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/a \u001b[0m\n", + "*******************************************************************************************************************\n", + "34\n", + "1.7249087614151182\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mkid\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "35\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + "*******************************************************************************************************************\n", + "36\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "37\n", + "0.9132128054919955\n", + "VBD\n", + "\u001b[38;5;226m\u001b[48;5;0mloved\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "38\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "39\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mwatch \u001b[0m\n", + "*******************************************************************************************************************\n", + "40\n", + "6.495473375871382\n", + 
"NNS\n", + "\u001b[38;5;226m\u001b[48;5;0mcartoons\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "41\n", + "15.452660592340097\n", + "VB\n", + "\u001b[38;5;196m\u001b[48;5;0m,\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/, \u001b[0m\n", + "*******************************************************************************************************************\n", + "42\n", + "0.053008093757376584\n", + "CC\n", + "\u001b[38;5;226m\u001b[48;5;0mbut\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "43\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mno \u001b[0m\n", + "*******************************************************************************************************************\n", + "44\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mmatter \u001b[0m\n", + "*******************************************************************************************************************\n", + "45\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mhow \u001b[0m\n", + "*******************************************************************************************************************\n", + "46\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mmany\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "47\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mtimes \u001b[0m\n", + "*******************************************************************************************************************\n", + "48\n", + "0.0\n", + "VBP\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "49\n", + "10.646676048338493\n", + "VBN\n", + "\u001b[38;5;196m\u001b[48;5;0masked\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/was used \u001b[0m\n", + "*******************************************************************************************************************\n", + "50\n", + "1.5529499099577042\n", + "TO\n", + "\u001b[38;5;226m\u001b[48;5;0mto\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "51\n", + "8.95932484381705\n", + "VBG\n", + "\u001b[38;5;214m\u001b[48;5;0mwatching\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/watch \u001b[0m\n", + "*******************************************************************************************************************\n", + "52\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mthem \u001b[0m\n", + "*******************************************************************************************************************\n", + "53\n", + "18.383069999315744\n", + "VB\n", + "\u001b[38;5;196m\u001b[48;5;0m,\u001b[0m\u001b[38;5;2m\u001b[48;5;0m/, \u001b[0m\n", + "*******************************************************************************************************************\n", + "54\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "55\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents 
\u001b[0m\n", + "*******************************************************************************************************************\n", + "56\n", + "5.4762173007041035\n", + "MD\n", + "检查点1*****************************************************\n", + "\u001b[38;5;226m\u001b[48;5;0mwould\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "57\n", + "5.3354081649787535\n", + "RB\n", + "\u001b[38;5;214m\u001b[48;5;0mnot\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/refuse \u001b[0m\n", + "*******************************************************************************************************************\n", + "58\n", + "5.981459151215268\n", + "TO\n", + "\u001b[38;5;214m\u001b[48;5;0mto\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/去掉 to \u001b[0m\n", + "*******************************************************************************************************************\n", + "59\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mlet \u001b[0m\n", + "*******************************************************************************************************************\n", + "60\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mme \u001b[0m\n", + "*******************************************************************************************************************\n", + "61\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "62\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mthey \u001b[0m\n", + "*******************************************************************************************************************\n", + "63\n", + "0.0\n", + "MD\n", + "\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "64\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0msay \u001b[0m\n", + "*******************************************************************************************************************\n", + "65\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "66\n", + "6.581833896065917\n", + "PRP\n", + "\u001b[38;5;214m\u001b[48;5;0mus\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/me \u001b[0m\n", + "*******************************************************************************************************************\n", + "67\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mthat \u001b[0m\n", + "*******************************************************************************************************************\n", + "68\n", + "0.0\n", + "VBG\n", + "\u001b[38;5;15m\u001b[48;5;0mplaying \u001b[0m\n", + "*******************************************************************************************************************\n", + "69\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mcard \u001b[0m\n", + "*******************************************************************************************************************\n", + "70\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "71\n", + "0.0\n", + "MD\n", + 
"\u001b[38;5;15m\u001b[48;5;0mwould \u001b[0m\n", + "*******************************************************************************************************************\n", + "72\n", + "1.4588194328350998\n", + "VB\n", + "\u001b[38;5;226m\u001b[48;5;0mhelp\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "73\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "74\n", + "3.173226871228209\n", + "NN\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[38;5;226m\u001b[48;5;0mbrain\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "75\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "76\n", + "5.90809919263306\n", + "RB\n", + "\u001b[38;5;214m\u001b[48;5;0mstill\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/still , \u001b[0m\n", + "*******************************************************************************************************************\n", + "77\n", + "2.2313680234481628\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mi\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "78\n", + "7.241924210620825\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0munwilling\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "79\n", + "0.033503519227476186\n", + "TO\n", + "\u001b[38;5;226m\u001b[48;5;0mto\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "80\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mplay \u001b[0m\n", + "*******************************************************************************************************************\n", + "81\n", + "2.7297515213736863\n", + "DT\n", + "\u001b[38;5;226m\u001b[48;5;0mthe\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "82\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "83\n", + "4.231741889869705\n", + "IN\n", + "\u001b[38;5;214m\u001b[48;5;0mfor\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/with \u001b[0m\n", + "*******************************************************************************************************************\n", + "84\n", + "0.8675317652760016\n", + "PRP\n", + "\u001b[38;5;226m\u001b[48;5;0mthem\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "85\n", + "2.1558967133083646\n", + "RB\n", + "\u001b[38;5;226m\u001b[48;5;0msometimes\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "86\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "87\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mi\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "88\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mdidn \u001b[0m\n", + "*******************************************************************************************************************\n", + "89\n", + "0.0\n", + "POS\n", + "\u001b[38;5;15m\u001b[48;5;0m' \u001b[0m\n", + "*******************************************************************************************************************\n", + "90\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mt \u001b[0m\n", + "*******************************************************************************************************************\n", + "91\n", + "0.011093191090367771\n", + "VB\n", + "\u001b[38;5;226m\u001b[48;5;0mrealize\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "92\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mhow \u001b[0m\n", + "*******************************************************************************************************************\n", + "93\n", + "3.6692828920487384\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mright\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "94\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "95\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents \u001b[0m\n", + "*******************************************************************************************************************\n", + "96\n", + "4.758635578869137\n", + "VBP\n", + "\u001b[38;5;214m\u001b[48;5;0mare\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/were \u001b[0m\n", + "*******************************************************************************************************************\n", + "97\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0muntil \u001b[0m\n", + "*******************************************************************************************************************\n", + "98\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "99\n", + "1.4610567542265707\n", + "VBD\n", + "\u001b[38;5;226m\u001b[48;5;0mentered\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "100\n", + "0.0\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mhigh\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "101\n", + "0.0\n", + "NN\n", + 
"\u001b[38;5;15m\u001b[48;5;0mschool \u001b[0m\n", + "*******************************************************************************************************************\n", + "102\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "*******************************************************************************************************************\n", + "103\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0mthe \u001b[0m\n", + "*******************************************************************************************************************\n", + "104\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mgames \u001b[0m\n", + "*******************************************************************************************************************\n", + "105\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "106\n", + "0.0\n", + "NNS\n", + "\u001b[38;5;15m\u001b[48;5;0mparents \u001b[0m\n", + "*******************************************************************************************************************\n", + "107\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0mtaught \u001b[0m\n", + "*******************************************************************************************************************\n", + "108\n", + "0.0\n", + "PRP\n", + "\u001b[38;5;15m\u001b[48;5;0mme \u001b[0m\n", + "*******************************************************************************************************************\n", + "109\n", + "9.636217093727145\n", + "WRB\n", + "\u001b[38;5;214m\u001b[48;5;0mwhere\u001b[0m\u001b[38;5;6m\u001b[48;5;0m/when \u001b[0m\n", + "*******************************************************************************************************************\n", + "110\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "*******************************************************************************************************************\n", + "111\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "*******************************************************************************************************************\n", + "112\n", + "0.0\n", + "DT\n", + "\u001b[38;5;15m\u001b[48;5;0ma \u001b[0m\n", + "*******************************************************************************************************************\n", + "113\n", + "0.8537064036270944\n", + "NN\n", + "\u001b[38;5;226m\u001b[48;5;0mchild\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "114\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mturned \u001b[0m\n", + "*******************************************************************************************************************\n", + "115\n", + "0.0\n", + "RP\n", + "\u001b[38;5;15m\u001b[48;5;0mout \u001b[0m\n", + "*******************************************************************************************************************\n", + "116\n", + "0.0\n", + "TO\n", + "\u001b[38;5;15m\u001b[48;5;0mto \u001b[0m\n", + "*******************************************************************************************************************\n", + "117\n", + "0.0\n", + "VB\n", + "\u001b[38;5;15m\u001b[48;5;0mbe \u001b[0m\n", + 
"*******************************************************************************************************************\n", + "118\n", + "0.0\n", + "RB\n", + "\u001b[38;5;15m\u001b[48;5;0mvery \u001b[0m\n", + "*******************************************************************************************************************\n", + "119\n", + "0.9869002719874604\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0museful\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "120\n", + "1.9653529850905183\n", + "RB\n", + "\u001b[38;5;226m\u001b[48;5;0mlater\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "*******************************************************************************************************************\n", + "121\n", + "0.0\n", + "IN\n", + "\u001b[38;5;15m\u001b[48;5;0min \u001b[0m\n", + "*******************************************************************************************************************\n", + "122\n", + "0.0\n", + "PRP$\n", + "\u001b[38;5;15m\u001b[48;5;0mmy \u001b[0m\n", + "*******************************************************************************************************************\n", + "123\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mlife \u001b[0m\n", + "*******************************************************************************************************************\n", + "124\n", + "0.0\n", + ".\n", + "\u001b[38;5;15m\u001b[48;5;0m. \u001b[0m\n", + "平均gap:1.4890399906268712\n", + "**********************************display_suggestions********************************************************\n", + "| suggestion : position in text\n", + "---------------------------------------------------------------------------------------\n", + "| a : 33\n", + "| , : 41\n", + "| was used : 49\n", + "| watch : 51\n", + "| , : 53\n", + "| refuse : 57\n", + "| 去掉 to : 58\n", + "| me : 66\n", + "| still , : 76\n", + "| with : 83\n", + "| were : 96\n", + "| when : 109\n", + "*************************************************************************************************************\n", + "建议的数量是 12\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/ipykernel/kernelbase.py\u001b[0m in \u001b[0;36m_input_request\u001b[0;34m(self, prompt, ident, parent, password)\u001b[0m\n\u001b[1;32m 728\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 729\u001b[0;31m \u001b[0mident\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mreply\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msession\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrecv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstdin_socket\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 730\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mException\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/jupyter_client/session.py\u001b[0m in \u001b[0;36mrecv\u001b[0;34m(self, socket, mode, content, copy)\u001b[0m\n\u001b[1;32m 802\u001b[0m 
\u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 803\u001b[0;31m \u001b[0mmsg_list\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msocket\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrecv_multipart\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcopy\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 804\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mzmq\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mZMQError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/zmq/sugar/socket.py\u001b[0m in \u001b[0;36mrecv_multipart\u001b[0;34m(self, flags, copy, track)\u001b[0m\n\u001b[1;32m 466\u001b[0m \"\"\"\n\u001b[0;32m--> 467\u001b[0;31m \u001b[0mparts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrecv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mflags\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mcopy\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrack\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrack\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 468\u001b[0m \u001b[0;31m# have first part already, only loop while more to receive\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32mzmq/backend/cython/socket.pyx\u001b[0m in \u001b[0;36mzmq.backend.cython.socket.Socket.recv\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mzmq/backend/cython/socket.pyx\u001b[0m in \u001b[0;36mzmq.backend.cython.socket.Socket.recv\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32mzmq/backend/cython/socket.pyx\u001b[0m in \u001b[0;36mzmq.backend.cython.socket._recv_copy\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/zmq/backend/cython/checkrc.pxd\u001b[0m in \u001b[0;36mzmq.backend.cython.checkrc._check_rc\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: ", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 19\u001b[0m \u001b[0manalyze_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtext\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_firstk_probs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m200\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 21\u001b[0;31m \u001b[0manalyse_and_modify_and_review\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in \u001b[0;36manalyse_and_modify_and_review\u001b[0;34m()\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0mindex\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minput\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Please input the position you want to modify:\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0mindex\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0mtext\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodify_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/ipykernel/kernelbase.py\u001b[0m in \u001b[0;36mraw_input\u001b[0;34m(self, prompt)\u001b[0m\n\u001b[1;32m 702\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_parent_ident\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 703\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_parent_header\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 704\u001b[0;31m \u001b[0mpassword\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 705\u001b[0m )\n\u001b[1;32m 706\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/ipykernel/kernelbase.py\u001b[0m in \u001b[0;36m_input_request\u001b[0;34m(self, prompt, ident, parent, password)\u001b[0m\n\u001b[1;32m 732\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 733\u001b[0m \u001b[0;31m# re-raise KeyboardInterrupt, to truncate traceback\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 734\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mKeyboardInterrupt\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 735\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 736\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "import os\n", + "#text = [\"Last week I went to the theater. There are many person . Luckily I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "#text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "#text = [\"me love yours.\"]\n", + "#text = [\"Mr. and Mrs.Zhang all work in our school. They live far from the school, and it takes them about a hour and a half to go to work every day. In their spare time, they are interesting in planting vegetables in their garden, that is on the rooftop of their house. 
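The per-token readout above prints, for each position, an index, a log-probability gap, a POS tag and the colour-coded token, followed by the mean gap ("平均gap", i.e. average gap) and a table of suggested edits. The gap is the difference, in log space, between BERT's top prediction for a masked position and the probability it assigns to the token actually written there; positions whose gap clears a threshold are the ones that end up in the suggestion table. Below is a minimal sketch of that scoring rule, assuming a softmaxed distribution `probs` has already been computed for one masked position; the helper names are illustrative, not the notebook's own.

```python
import math
import torch

def log_prob_gap(probs: torch.Tensor, original_id: int) -> float:
    # probs: softmaxed vocabulary distribution for one [MASK]ed position.
    # 0.0 means the original token is already BERT's top guess; large values
    # mean the model strongly prefers something else at this position.
    top_prob = probs.max().item()
    original_prob = probs[original_id].item()
    return math.log(top_prob) - math.log(original_prob)

def flag_positions(per_position_probs, input_ids, threshold=3.0):
    # Collect (position, gap) pairs worth surfacing as suggestions.
    flags = []
    for pos, probs in enumerate(per_position_probs):
        gap = log_prob_gap(probs, input_ids[pos])
        if gap > threshold:
            flags.append((pos, gap))
    return flags
```

This is the same `math.log(top_prob) - math.log(original_prob)` comparison that `give_suggestion` and `judge_and_suggestion` perform later in the notebook.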
They often get up earlier and water the vegetables together. They have also bought in some gardening tools.beside, they often get some useful informations from the internet. When summer came, they will invite their students pick the vegetables!\"]\n", + "text = [\"When I was little, Friday's night was our family game night. After supper, we would play card games of all sort in the sitting room. As the kid, I loved to watch cartoons,but no matter how many times I asked to watching them, my parents would not to let me. They would say to us that playing card games would help my brain. Still I unwilling to play the games for them sometimes. I didn't realize how right my parents are until I entered high school. The games my parents taught me where I was a child turned out to be very useful later in my life.\"]\n", + "def analyse_and_modify_and_review():\n", + " global text\n", + " analyze_text(text, show_firstk_probs=200)\n", + " while len(suggestions)>0:\n", + " display_suggestion()\n", + " print('建议的数量是',len(suggestions))\n", + " if len(suggestions) == 0:\n", + " break\n", + " else:\n", + " index = input(\"Please input the position you want to modify:\")\n", + " index = int(index)\n", + " text[0] = modify_text(index)\n", + " analyze_text(text, show_firstk_probs=200)\n", + " \n", + "analyse_and_modify_and_review()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "text = [\"The trophy doesn't fit into the brown suitcase because the _ is too large.\"]\n", + "# text = [\"Mary beat John in the match because _ was very strong.\"]\n", + "features = convert_examples_to_features(convert_text_to_examples(text), tokenizer, print_info=False)\n", + "input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long).to(device)\n", + "input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long).to(device)\n", + "mlm_logits, _ = model(input_ids, input_type_ids)\n", + "mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + "tokens = features[0].tokens\n", + "top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=100)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "text = [\n", + " # same / different\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have the same hair color.\",\n", + " \"Tom has black hair. Mary has black hair. John has yellow hair. _ and Mary have different hair colors.\",\n", + " \"Tom has yellow hair. Mary has black hair. John has black hair. Mary and _ have the same hair color.\",\n", + " # because / although\n", + " \"John is taller/shorter than Mary because/although _ is older/younger.\",\n", + " \"The red ball is heavier/lighter than the blue ball because/although the _ ball is bigger/smaller.\",\n", + " \"Charles did a lot better/worse than his good friend Nancy on the test because/although _ had/hadn't studied so hard.\",\n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thought that he would arrive earlier than Susan, but/and indeed _ was the first to arrive.\",\n", + " # reverse\n", + " \"John came then Mary came. They left in reverse order. _ left then _ left.\",\n", + " \"John came after Mary. They left in reverse order. _ left after _ .\",\n", + " \"John came first, then came Mary. They left in reverse order: _ left first, then left _ .\",\n", + " # compare\n", + " \"Though John is tall, Tom is taller than John. 
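The trophy/suitcase cell above probes the masked-LM head with a Winograd-style cloze: the `_` placeholder is swapped for `[MASK]` by `convert_examples_to_features`, and `show_lm_probs` prints the highest-probability fillers. A reduced version of the same probe that simply compares two candidate referents at the mask is sketched below; it reuses the `tokenizer`, `model` and `device` set up earlier and assumes both candidate words are single WordPiece tokens.

```python
import torch
import torch.nn.functional as F

def compare_candidates(sentence, candidates=("trophy", "suitcase")):
    # Mirror convert_examples_to_features: '_' becomes [MASK], wrapped in [CLS]/[SEP].
    tokens = ["[CLS]"] + ["[MASK]" if t == "_" else t for t in tokenizer.tokenize(sentence)] + ["[SEP]"]
    input_ids = torch.tensor([tokenizer.convert_tokens_to_ids(tokens)], dtype=torch.long).to(device)
    type_ids = torch.zeros_like(input_ids)
    with torch.no_grad():
        mlm_logits, _ = model(input_ids, type_ids)   # same call convention as the cell above
    probs = F.softmax(mlm_logits[0, tokens.index("[MASK]")], dim=-1)
    return {w: probs[tokenizer.vocab[w]].item() for w in candidates}

# compare_candidates("The trophy doesn't fit into the brown suitcase because the _ is too large.")
# returns the probability of each candidate at the blank, so the two readings can be compared directly.
```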
So John is _ than Tom.\",\n", + " \"Tom is taller than John. So _ is shorter than _.\",\n", + " # WSC-style: before /after\n", + " \"Mary came before/after John. _ was late/early .\",\n", + " # yes / no\n", + " \"Was Tom taller than Susan? Yes, _ was taller.\",\n", + " # right / wrong, epistemic modality\n", + " \"John said the rain was about to stop. Mary said the rain would continue. Later the rain stopped. _ was wrong.\",\n", + " \n", + " \"The trophy doesn't fit into the brown suitcase because/although the _ is too small/large.\",\n", + " \"John thanked Mary because _ had given help to _ . \",\n", + " \"John felt vindicated/crushed when his longtime rival Mary revealed that _ was the winner of the competition.\",\n", + " \"John couldn't see the stage with Mary in front of him because _ is so short/tall.\",\n", + " \"Although they ran at about the same speed, John beat Sally because _ had such a bad start.\",\n", + " \"The fish ate the worm. The _ was hungry/tasty.\",\n", + " \n", + " \"John beat Mary. _ won the game/e winner.\",\n", + "]\n", + "text" + ] + }, + { + "cell_type": "code", + "execution_count": 1345, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}" + ] + }, + "execution_count": 1345, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "config" + ] + }, + { + "cell_type": "code", + "execution_count": 1346, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_switched_label.json') as f:\n", + " examples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "with open('WSC_child_problem.json') as f:\n", + " cexamples = json.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " for s in ce['sentences']:\n", + " for a in s['answer0'] + s['answer1']:\n", + " a = a.lower()\n", + " if a not in tokenizer.vocab:\n", + " ce\n", + " print(a, 'not in vocab!!!')" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "for ce in cexamples:\n", + " if len(ce['sentences']) > 0:\n", + " e = examples[ce['index']]\n", + " assert ce['index'] == e['index']\n", + " e['score'] = all([s['score'] for s in ce['sentences']])\n", + " assert len(set([s['adjacent_ref'] for s in ce['sentences']])) == 1, 'adjcent_refs are different!'\n", + " e['adjacent_ref'] = ce['sentences'][0]['adjacent_ref']" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "from collections import defaultdict\n", + "\n", + "groups = defaultdict(list)\n", + "for e in examples:\n", + " if 'score' in e:\n", + " index = e['index']\n", + " if index < 252:\n", + " if index % 2 == 1:\n", + " index -= 1\n", + " elif index in [252, 253, 254]:\n", + " index = 252\n", + " else:\n", + " if index % 2 == 0:\n", + " index -= 1\n", + " groups[index].append(e)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[(2, 'fit 
into:large/small', False),\n", + " (4, 'thank:receive/give', False),\n", + " (6, 'call:successful available', True),\n", + " (8, 'ask:repeat answer', False),\n", + " (10, 'zoom by:fast/slow', False),\n", + " (12, 'vindicated/crushed:be the winner', False),\n", + " (14, 'lift:weak heavy', False),\n", + " (16, 'crash through:[hard]/[soft]', False),\n", + " (18, '[block]:short/tall', False),\n", + " (20, 'down to:top/bottom', False),\n", + " (22, 'beat:good/bad', False),\n", + " (24, 'roll off:anchored level', False),\n", + " (26, 'above/below', False),\n", + " (28, 'better/worse:study hard', False),\n", + " (30, 'after/before:far away', False),\n", + " (32, 'be upset with:buy from not work/sell not work', True),\n", + " (34, '?yell at comfort:upset', False),\n", + " (36, 'above/below:moved first', False),\n", + " (38, 'although/because', False),\n", + " (40, 'bully:punish rescue', False),\n", + " (42, 'pour:empty/full', False),\n", + " (44, 'know:nosy indiscreet', False),\n", + " (46, 'explain:convince/understand', True),\n", + " (48, '?know tell:so/because', True),\n", + " (50, 'beat:younger/older', False),\n", + " (56, 'clog:cleaned removed', True),\n", + " (58, '?immediately follow:short delayed', False),\n", + " (60, '?between:see see around', True),\n", + " (64, 'but/and', False),\n", + " (66, 'clean:put in the trash put in the drawer', False),\n", + " (68, 'because/but', False),\n", + " (70, 'out of:handy lighter', False),\n", + " (72, 'put:tall high', False),\n", + " (74, 'show:good famous', True),\n", + " (76, 'pay for:generous grateful', False),\n", + " (78, 'but', False),\n", + " (80, 'if', False),\n", + " (82, 'if', False),\n", + " (84, 'fool:get/lose', False),\n", + " (88, 'wait:impatient cautious', False),\n", + " (90, 'give birth:woman baby', True),\n", + " (92, '?stop normal/stop abnormal:strange', False),\n", + " (96, 'eat:hungry tasty', False),\n", + " (98, 'put ... into filled with ... :get in/get out', False),\n", + " (100, 'up:at the bottom/at the top', False),\n", + " (102, 'crash through:removed repaired', False),\n", + " (104, 'stab:taken to the police station taken to the hospital', False),\n", + " (106, 'hear ... humming and whistling:annoyed/annoying', True),\n", + " (108, 'see ... 
juggling watermelons:impressed/impressive', True),\n", + " (114, 'tell lies: truthful skeptical', True),\n", + " (130, 'but:disappointed', True),\n", + " (132, 'visit:invite come out/invite come in', True),\n", + " (134, 'take classes from:eager known to speak it fluently', False),\n", + " (138, 'cover:out gone', True),\n", + " (144, 'tuck:work sleep', True),\n", + " (150, 'influence:later/earlier', False),\n", + " (152, 'can not cut:thick small', False),\n", + " (154, 'attack:kill guard', False),\n", + " (156, 'attack:bold nervous', False),\n", + " (160, 'change:hard:easy', False),\n", + " (166, 'alive:is/was', False),\n", + " (168, 'infant:twelve years old twelve months old', False),\n", + " (170, 'better equipped and large:defeated/victorious', False),\n", + " (178, 'interview:persistent cooperative', False),\n", + " (186, 'be full of:minority/majority', False),\n", + " (188, 'like over:more/fewer', False),\n", + " (190, 'place on all:not enough/too many', True),\n", + " (192, 'stick:leave have', True),\n", + " (196, 'follow:admire/influence', True),\n", + " (198, 'fit through:wide/narrow', False),\n", + " (200, 'trade:dowdy/great', False),\n", + " (202, 'hire/hire oneself to:take care of', True),\n", + " (204, 'promise/order', False),\n", + " (208, 'mother:education place', True),\n", + " (210, 'knock:get an answer/answer', True),\n", + " (212, 'pay:receive/deliver', False),\n", + " (218, '?', False),\n", + " (220, 'say check:move take', False),\n", + " (222, '?', False),\n", + " (224, 'give a life:drive alone walk', False),\n", + " (226, 'pass the plate:full/hungry', False),\n", + " (228, 'pass:turn over turn next', False),\n", + " (232, 'stretch pat', True),\n", + " (234, 'accept share', False),\n", + " (236, 'speak:break silence break concentration', False),\n", + " (240, 'carry:leg ache leg dangle', True),\n", + " (242, 'carry:in arms in bassinet', False),\n", + " (244, 'hold:against chest against will', True),\n", + " (250, 'stop', False),\n", + " (252, 'even though/because/not', False),\n", + " (255, 'give:not hungry/hungry', False),\n", + " (259, 'ask for a favor:refuse/be refused`', False),\n", + " (261, 'cede:less popular/more popular', False),\n", + " (263, 'not pass although:see open/open', True),\n", + " (271, 'suspect regret', True)]" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "def filter_dict(d, keys=['index', 'sentence', 'correct_answer', 'relational_word', 'is_associative', 'score']):\n", + " return {k: d[k] for k in d if k in keys}\n", + "\n", + "# ([[filter_dict(e) for e in eg] for eg in groups.values() if eg[0]['relational_word'] != 'none' and all([e['score'] for e in eg])])# / len([eg for eg in groups.values() if eg[0]['relational_word'] != 'none'])\n", + "[(index, eg[0]['relational_word'], all([e['score'] for e in eg])) for index, eg in groups.items() if eg[0]['relational_word'] != 'none']\n", + "# len([filter_dict(e) for e in examples if 'score' in e and not e['score'] and e['adjacent_ref']])\n", + "# for e in examples:\n", + "# if e['index'] % 2 == 0:\n", + "# print(e['sentence'])" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "179" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sum(['because' in e['sentence'] for e in examples]) + \\\n", + "sum(['so ' in e['sentence'] for e in examples]) + \\\n", + "sum(['but ' in e['sentence'] 
for e in examples]) + \\\n", + "sum(['though' in e['sentence'] for e in examples])" + ] + }, + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "# with open('WSC_switched_label.json', 'w') as f:\n", + "# json.dump(examples, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "vis_attn_topk = 3\n", + "\n", + "def has_chinese_label(labels):\n", + " labels = [label.split('->')[0].strip() for label in labels]\n", + " r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. / (len(labels) - 1)\n", + " return 0 < r < 0.5 # r == 0 means empty query labels used in self attention\n", + "\n", + "def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'):\n", + " assert len(query_labels) == attn.size(0)\n", + " assert len(key_labels) == attn.size(1)\n", + "\n", + " ax1.set_xlim([-1, 1])\n", + " ax1.set_xticks([])\n", + " ax2 = ax1.twinx()\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " pos = range(nlabels)\n", + " \n", + " if 'self' in attn_name and col < ncols - 1:\n", + " query_labels = ['' for _ in query_labels]\n", + "\n", + " for ax, labels in [(ax1, key_labels), (ax2, query_labels)]:\n", + " ax.set_yticks(pos)\n", + " if has_chinese_label(labels):\n", + " ax.set_yticklabels(labels, fontproperties=zhfont)\n", + " else:\n", + " ax.set_yticklabels(labels)\n", + " ax.set_ylim([nlabels - 1, 0])\n", + " ax.tick_params(width=0, labelsize='xx-large')\n", + "\n", + " for spine in ax.spines.values():\n", + " spine.set_visible(False)\n", + "\n", + "# mask, attn = filter_attn(attn)\n", + " for qi in range(attn.size(0)):\n", + "# if not mask[qi]:\n", + "# continue\n", + "# for ki in range(attn.size(1)):\n", + " for ki in attn[qi].topk(vis_attn_topk)[1]:\n", + " a = attn[qi, ki]\n", + " ax1.plot((-1, 1), (ki, qi), color, alpha=a)\n", + "# print(attn.mean(dim=0).topk(5)[0])\n", + "# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy())\n", + "\n", + "def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None):\n", + " hypo, nheads, labels_dict = result_tuple\n", + " key_labels, query_labels = labels_dict[attn_name]\n", + " if heads is None:\n", + " heads = range(nheads)\n", + " else:\n", + " nheads = len(heads)\n", + " \n", + " stride = 2 if attn_name == 'dec_enc_attns' else 1\n", + " nlabels = max(len(key_labels), len(query_labels))\n", + " rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0))\n", + " \n", + " rows = nheads // ncols * stride\n", + " fig, axes = plt.subplots(rows, ncols)\n", + " \n", + " # for head in range(nheads):\n", + " for head_i, head in enumerate(heads):\n", + " row, col = head_i * stride // ncols, head_i * stride % ncols\n", + " ax1 = axes[row, col]\n", + " attn = hypo[attn_name][layer][head]\n", + " _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col)\n", + " if attn_name == 'dec_enc_attns':\n", + " col = col + 1\n", + " axes[row, col].axis('off') # next subfig acts as blank place holder\n", + " # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20)\n", + " plt.show() \n", + " \n", + "ncols = 4" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "'BertSelfAttention' object has no attribute 'attention_probs'", + "output_type": "error", + "traceback": [ + 
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0mattn_name\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'enc_self_attns'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mhypo\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbert\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mencoder\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlayer\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mattention_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_hidden_layers\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 3\u001b[0m \u001b[0mkey_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mquery_labels\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtokens\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlabels_dict\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;34m{\u001b[0m\u001b[0mattn_name\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mkey_labels\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mquery_labels\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mresult_tuple\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhypo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mconfig\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_attention_heads\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlabels_dict\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py\u001b[0m in \u001b[0;36m__getattr__\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m 516\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mmodules\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 517\u001b[0m raise AttributeError(\"'{}' object has no attribute '{}'\".format(\n\u001b[0;32m--> 518\u001b[0;31m type(self).__name__, name))\n\u001b[0m\u001b[1;32m 519\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 520\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__setattr__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAttributeError\u001b[0m: 'BertSelfAttention' object has no attribute 'attention_probs'" + ] + } + ], + "source": [ + "attn_name = 'enc_self_attns'\n", + "hypo = {attn_name: [model.bert.encoder.layer[i].attention.self.attention_probs[0] for i in range(config.num_hidden_layers)]}\n", + "key_labels = query_labels = tokens\n", + "labels_dict = {attn_name: (key_labels, query_labels)}\n", + "result_tuple = (hypo, config.num_attention_heads, labels_dict)\n", + "plot_layer_attn(result_tuple, attn_name=attn_name, layer=10, heads=None)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/likunlin_final.ipynb b/likunlin_final.ipynb new file mode 100644 index 00000000000000..6bbf623bf3d641 --- /dev/null +++ b/likunlin_final.ipynb @@ -0,0 +1,2542 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.\n", + "Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.\n", + "Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.\n", + "Warning: apex was installed without --cuda_ext. 
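The `AttributeError` at the end of the first notebook is expected with a stock `pytorch_pretrained_bert` install: `BertSelfAttention` computes its attention weights inside `forward` and never stores them, so `model.bert.encoder.layer[i].attention.self.attention_probs` only exists if `modeling.py` has been patched locally. One way to get the same hook without editing the library is sketched below; it reproduces the usual `BertSelfAttention.forward` arithmetic from that library (an assumption about its internals, so check it against the installed version) and records the pre-dropout attention matrix on the module.

```python
import math
import types
import torch
import torch.nn as nn

def forward_with_saved_probs(self, hidden_states, attention_mask):
    # Same computation as pytorch_pretrained_bert's BertSelfAttention.forward,
    # plus one line that keeps the attention distribution around for plotting.
    query_layer = self.transpose_for_scores(self.query(hidden_states))
    key_layer = self.transpose_for_scores(self.key(hidden_states))
    value_layer = self.transpose_for_scores(self.value(hidden_states))

    attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
    attention_scores = attention_scores / math.sqrt(self.attention_head_size)
    attention_scores = attention_scores + attention_mask

    attention_probs = nn.Softmax(dim=-1)(attention_scores)
    self.attention_probs = attention_probs.detach()      # exposed for visualization
    attention_probs = self.dropout(attention_probs)

    context_layer = torch.matmul(attention_probs, value_layer)
    context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
    new_shape = context_layer.size()[:-2] + (self.all_head_size,)
    return context_layer.view(*new_shape)

for layer in model.bert.encoder.layer:
    self_attn = layer.attention.self
    self_attn.forward = types.MethodType(forward_with_saved_probs, self_attn)
```

After patching, a forward pass populates `attention_probs` on every layer and the `plot_layer_attn` cell can read it as intended.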
FusedLayerNorm will be unavailable.\n", + "Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import nltk\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/27/2019 11:51:05 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased/vocab.txt\n", + "05/27/2019 11:51:05 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased\n", + "05/27/2019 11:51:05 - INFO - pytorch_pretrained_bert.modeling - Model config {\n", + " \"attention_probs_dropout_prob\": 0.1,\n", + " \"hidden_act\": \"gelu\",\n", + " \"hidden_dropout_prob\": 0.1,\n", + " \"hidden_size\": 768,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 3072,\n", + " \"max_position_embeddings\": 512,\n", + " \"num_attention_heads\": 12,\n", + " \"num_hidden_layers\": 12,\n", + " \"type_vocab_size\": 2,\n", + " \"vocab_size\": 30522\n", + "}\n", + "\n", + "05/27/2019 11:51:08 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias']\n" + ] + } + ], + "source": [ + "class Args:\n", + " def __init__(self):\n", + " pass\n", + " \n", + "args = Args()\n", + "args.no_cuda = True #不用GPU\n", + "\n", + "CONFIG_NAME = 'bert_config.json'\n", + "BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased'\n", + "config_file = os.path.join(BERT_DIR, CONFIG_NAME)\n", + "config = BertConfig.from_json_file(config_file)\n", + "\n", + "try:\n", + " tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))\n", + "except:\n", + " tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n", + "#tokenizer.tokenize = nltk.word_tokenize\n", + "\n", + "model = BertForMaskedLM.from_pretrained(BERT_DIR)\n", + "device = torch.device(\"cuda\" if torch.cuda.is_available() and not args.no_cuda else \"cpu\")\n", + "_ = model.to(device)\n", + "_ = model.eval()\n", + "\n", + "input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = [],[],[],[],[],[]\n", + "suggestions = {} #外部变量,需要传到前端\n", + "original_tokens = [] #外部变量,需要传到前端" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt\r\n" + ] + } + ], + "source": [ + "ls /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "BertForPreTraining:\n", + "Outputs:\n", + " if `masked_lm_labels` and `next_sentence_label` are not `None`:\n", + " Outputs the total_loss which is the sum of the masked language modeling loss and the next\n", + " sentence classification loss.\n", 
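Before relying on the masked-LM head loaded above, a quick shape check makes the call convention used throughout these notebooks concrete: the forked model here is called as `mlm_logits, _ = model(input_ids, input_type_ids)`, and the first output carries one row of vocabulary logits per input position. The sentence in the sketch below is illustrative only.

```python
# Smoke test of the call convention used in the rest of the notebook.
tokens = ["[CLS]"] + tokenizer.tokenize("he went to the _ to buy milk .") + ["[SEP]"]
tokens = ["[MASK]" if t == "_" else t for t in tokens]
ids = torch.tensor([tokenizer.convert_tokens_to_ids(tokens)], dtype=torch.long).to(device)
mlm_logits, _ = model(ids, torch.zeros_like(ids))
print(mlm_logits.shape)   # [1, len(tokens), config.vocab_size], i.e. one distribution per position
top5 = mlm_logits[0, tokens.index("[MASK]")].topk(5)[1].tolist()
print(tokenizer.convert_ids_to_tokens(top5))
```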
+ " if `masked_lm_labels` or `next_sentence_label` is `None`:\n", + " Outputs a tuple comprising\n", + " - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and\n", + " - the next sentence classification logits of shape [batch_size, 2]." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "from_pretrained:\n", + "Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict.\n", + "Download and cache the pre-trained model file if needed." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "scrolled": false + }, + "outputs": [], + "source": [ + "import re\n", + "def convert_text_to_examples(text): \n", + " '''功能:\n", + " 把输入的文本变成一个实例,一个实例中包含text_a,text_b(text_b用于是否为上下句的任务,该任务不使用此功能)\n", + " 输入:\n", + " text:一个列表结构,列表中包含原始文本字符串,由于仅完成mlm任务,所以text列表中仅包含一个字符串,就是待检查的字符串\n", + " 输出:\n", + " example:实例,其中包含:\n", + " unique_id:此任务仅用到0\n", + " text_a:text列表内的字符串\n", + " text_b:此任务下该变量为None\n", + " '''\n", + " examples = []\n", + " unique_id = 0\n", + " if True:\n", + " for line in text:\n", + " line = line.strip()\n", + " text_a = None\n", + " text_b = None\n", + " m = re.match(r\"^(.*) \\|\\|\\| (.*)$\", line) #想要匹配这样的字符串'You are my sunshine. ||| I love you.'\n", + " \n", + " if m is None:\n", + " text_a = line\n", + " else:\n", + " text_a = m.group(1) #匹配的第一句,比如You are my sunshine,my only sunshine.\n", + " text_b = m.group(2) #匹配的第二句,比如I love you.\n", + " \n", + " examples.append(\n", + " InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b))\n", + " unique_id += 1\n", + " return examples\n", + "#print(convert_text_to_examples(['I love you. The cat is so cute.'])[0].text_a)\n", + "\n", + "def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False):\n", + " '''功能:\n", + " 把实例变成一个特征列表\n", + " 输入:\n", + " examples:实例,convert_text_to_examples()函数的输出\n", + " tokenizer:BERT的tokenizer,用于将文本进行各种处理,它可以把一个text转变成tokens,把tokens变成每个token在词典中的编号以及逆运算\n", + " append_special_tokens:是否允许在生成的tokens中加入特殊符号,也就是[CLS]、[MASK]和[SEP],默认为True\n", + " replace_mask:不明\n", + " print_info:不明\n", + " 输出:\n", + " features:每一个feature包含:\n", + " unique_id:编号,目前实现的功能features里面仅有一个feature\n", + " tokens=tokens,tokens:是形如['i','love','you','.']的一个列表\n", + " input_ids=input_ids:字符串中的每个单词在词典中的index序列\n", + " input_mask=input_mask:一堆1\n", + " input_type_ids=input_type_ids)):对text_a,text_b的区分,用于上下句任务,对于本任务,该参数为一个列表,其中包含token长度个的0\n", + " '''\n", + " features = []\n", + " for (ex_index, example) in enumerate(examples):\n", + " tokens_a = tokenizer.tokenize(example.text_a) #tokenize的作用是把\"i love you.\"变成['i','love','you','.']\n", + " tokens_b = None\n", + " if example.text_b:\n", + " tokens_b = tokenizer.tokenize(example.text_b)\n", + "\n", + " tokens = []\n", + " input_type_ids = [] #segment embedding\n", + " if append_special_tokens: #输入参数中默认为true\n", + " tokens.append(\"[CLS]\")\n", + " input_type_ids.append(0)\n", + " for token in tokens_a:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(0)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " input_type_ids.append(0)\n", + "\n", + " if tokens_b:\n", + " for token in tokens_b:\n", + " if replace_mask and token == '_': # XD\n", + " token = \"[MASK]\"\n", + " tokens.append(token)\n", + " input_type_ids.append(1)\n", + " if append_special_tokens:\n", + " tokens.append(\"[SEP]\")\n", + " 
input_type_ids.append(1)\n", + " input_ids = tokenizer.convert_tokens_to_ids(tokens) #把原来句子中的词语编成在字典中的编号\n", + " input_mask = [1] * len(input_ids) \n", + " \n", + " if ex_index < 5:\n", + "# logger.info(\"*** Example ***\")\n", + "# logger.info(\"unique_id: %s\" % (example.unique_id))\n", + " logger.info(\"tokens: %s\" % \" \".join([str(x) for x in tokens]))\n", + "# logger.info(\"input_ids: %s\" % \" \".join([str(x) for x in input_ids]))\n", + "# logger.info(\"input_mask: %s\" % \" \".join([str(x) for x in input_mask]))\n", + "# logger.info(\n", + "# \"input_type_ids: %s\" % \" \".join([str(x) for x in input_type_ids]))\n", + " \n", + " features.append(\n", + " InputFeatures(\n", + " unique_id=example.unique_id,#编号,目前实现的功能features里面仅有一个feature\n", + " tokens=tokens,#形如['i','love','you','.']的一个列表\n", + " input_ids=input_ids,#字符串中的每个单词在词典中的index序列\n", + " input_mask=input_mask, #一堆1\n", + " input_type_ids=input_type_ids)) #第0类和第1类,对text_a,text_b的区分,本代码中全都是零\n", + " return features \n", + "\n", + "def copy_and_mask_feature(feature, step, masked_tokens=None): \n", + " '''\n", + " 功能:\n", + " 输入feature生成训练的批次数以及mask好的训练素材\n", + " 输入:\n", + " feature:convert_examples_to_features函数的输出\n", + " step:两个[mask]位置的步长\n", + " masked_tokens:默认为None,在程序中没有使用\n", + " '''\n", + " import copy\n", + " tokens = feature.tokens\n", + " len_token = len(tokens)\n", + " if len_token 0\n", + " masked_feature_copies = []\n", + " for i in batches: #用[mask]依次掩盖每一个位置\n", + " feature_copy = copy.deepcopy(feature)\n", + " masked_pos = i\n", + " while masked_pos < len_token:\n", + " feature_copy.input_ids[masked_pos] = tokenizer.vocab[\"[MASK]\"]\n", + " masked_pos = masked_pos + step\n", + " masked_feature_copies.append(feature_copy)\n", + " return masked_feature_copies, batches\n", + "\n", + "#masked_feature_copies, batches = copy_and_mask_feature(features[0],3)\n", + "#print(masked_feature_copies[0].input_ids) #结果[101, 1045, 2293, 103, 102]\n", + "#print(batches) #结果是一个range(0,5)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "analyzed_cache = {}\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "#print (lemma('gave'))\n", + "#print (lexeme('production'))\n", + "#print (conjugate(verb='give',tense=PRESENT,number=SG))\n", + "def process_text(text): \n", + " '''\n", + " 功能:\n", + " 处理输入文本,将文本按句子分成若干token,得出原来text中index位置的单词在x句子的y位置,还得出各个句子类别码\n", + " 输入:\n", + " text:文本字符串,注意区别\n", + " 输出:\n", + " input_ids_sen:二维列表,第一维列表的元素是每个句子的input_ids列表\n", + " input_type_ids_sen:二维列表,第一维列表的元素是每个句子的input_type_ids列表\n", + " in_sentence:通过这个二维数组可以很方便的通过在完整text中的下标找到这个下标所在的句子和在句子中的下标\n", + " sentences:字符串列表,列表中每一个元素是一个句子字符串\n", + " entire_ids:整个text的input_ids\n", + " entire_type_ids:整个text的input_type_ids\n", + " '''\n", + " token =[]\n", + " entire_type_ids = []\n", + " token0 = tokenizer.tokenize(text)\n", + " token.append('[CLS]')\n", + " entire_type_ids.append(0)\n", + " for i in token0:\n", + " token.append(i)\n", + " entire_type_ids.append(0)\n", + " token.append('[SEP]')\n", + " entire_type_ids.append(0)\n", + " \n", + " entire_ids = tokenizer.convert_tokens_to_ids(token)\n", + " in_sentence = [[0,0]] \n", + " sentence_n = 0\n", + " index = 1\n", + " for i in range(1,len(token)-1):\n", + " in_sentence.append([sentence_n,index]) #每个token中的词在所在句中的位置表示出来,以及该位置在哪一句中\n", + " index = index + 1 
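The guard near the top of `copy_and_mask_feature` has been mangled in this diff (`if len_token 0` is not valid Python), but the rest of the function makes the intent clear: for a stride of `step`, copy `i` masks positions `i`, `i+step`, `i+2*step`, … so that the copies jointly cover every token while never masking adjacent tokens in the same pass. The standalone sketch below illustrates that strided-masking idea; the exact original guard and starting offsets are not recoverable from the diff, so treat the loop bounds here as an assumption rather than a restoration.

```python
import copy

def strided_mask_copies(input_ids, step, mask_id=103):   # 103 == [MASK] in bert-base-uncased
    # Illustration only: produce `step` copies, copy i masking i, i+step, i+2*step, ...
    copies = []
    for start in range(step):
        ids = copy.deepcopy(input_ids)
        pos = start
        while pos < len(ids):
            ids[pos] = mask_id
            pos += step
        copies.append(ids)
    return copies

# strided_mask_copies([101, 1045, 2293, 2017, 1012, 102], step=3) -> 3 copies,
# each with every third position replaced by the [MASK] id
```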
#比如,位置i这个词在第sentence句的index位置上\n", + " if token[i] == '.':\n", + " sentence_n = sentence_n + 1\n", + " index = 1\n", + " sentences = text.split(\".\")\n", + " \n", + " sen_token = []\n", + " input_ids_sen = []\n", + " input_type_ids_sen = []\n", + " for i,sentence in enumerate(sentences):\n", + " sentence = sentence + '.'\n", + " sentences[i] = sentences[i] + '.'\n", + " token = []\n", + " input_type_ids = []\n", + " tokens = tokenizer.tokenize(sentence)\n", + " token.append('[CLS]')\n", + " input_type_ids.append(0) \n", + " for i in tokens:\n", + " token.append(i)\n", + " input_type_ids.append(0) \n", + " token.append('[SEP]') \n", + " input_type_ids.append(0)\n", + " input_ids_sen.append(tokenizer.convert_tokens_to_ids(token))\n", + " input_type_ids_sen.append(input_type_ids)\n", + " return input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "def get_word(index):\n", + " '''\n", + " 输入:\n", + " index:在完整text中的位置\n", + " 输出\n", + " word:该位置上的单词\n", + " '''\n", + " word_id = entire_ids[index]\n", + " word = tokenizer.ids_to_tokens[word_id]\n", + " return word\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "import copy\n", + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "def give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold):\n", + " '''\n", + " 功能:\n", + " 给出指定文本指定位置的推荐用词\n", + " 输入:\n", + " input_ids_:要分析的文本的input_ids\n", + " input_type_ids_:要分析的文本的的input_type_ids\n", + " id_in_sen:要分析的文本中[MASK]的位置下标,也就是需要给出建议用词的位置\n", + " alternative_word:推荐的备选词范围\n", + " threshold:阈值\n", + " 输出:\n", + " suggestion:推荐\n", + " need:推荐的是否是备选词中的词\n", + " suggestion_prob:推荐词填在id_in_sen位置的概率\n", + " top_of_alternative:备选词中最值得推荐的词\n", + " '''\n", + " input_ids = copy.deepcopy(input_ids_)\n", + " input_type_ids = copy.deepcopy(input_type_ids_)\n", + " word0 = input_ids[id_in_sen]\n", + " word0 = tokenizer.ids_to_tokens[word0]\n", + " list_word_id = []\n", + " \n", + " input_ids[id_in_sen] = tokenizer.vocab[\"[MASK]\"]\n", + " T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度\n", + " T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " T_input_ids = T_input_ids.to(device) #拿去GPU\n", + " T_input_type_ids = T_input_type_ids.to(device)\n", + "\n", + " mlm_logits, _ = model(T_input_ids, T_input_type_ids)\n", + " mlm_probs = F.softmax(mlm_logits, dim=-1)\n", + " reduced_mlm_probs = mlm_probs[0][id_in_sen]\n", + "\n", + " top_ind = reduced_mlm_probs.argmax().item()\n", + " top_prob = reduced_mlm_probs.max().item() \n", + " \n", + " list_word = []\n", + " \n", + " top_of_alternative = None\n", + " if len(alternative_word)>0:\n", + " list_word_prob = {}\n", + " for word in alternative_word:\n", + " try:\n", + " list_word_id.append(tokenizer.vocab[word])\n", + " list_word.append(word)\n", + " except KeyError:\n", + " pass\n", + "\n", + " for word,word_id in zip(list_word,list_word_id):\n", + " list_word_prob.update({word:float(reduced_mlm_probs[word_id].data)})\n", + " prob_ord = sorted(list_word_prob.items(),key = lambda x:x[1],reverse = True)\n", + " \n", + " top_prob_word = prob_ord[0][1]\n", + " top_of_alternative = prob_ord[0][0]\n", + " gap = math.log(top_prob) - math.log(top_prob_word)\n", + " 
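`process_text` above does the bookkeeping that all later checks depend on: it encodes the whole text once (`entire_ids`), encodes each sentence separately (`input_ids_sen`), and builds `in_sentence`, which maps a token's position in the whole text to `[sentence_id, position_within_that_sentence]` so the same word can be re-scored either inside its sentence or in the full context. A short illustrative use of that map together with `get_word`, run after the globals have been assigned:

```python
# Illustrative only: inspect the global-position -> (sentence, local-position) map.
text_demo = "I love you. The cat is cute."
input_ids_sen, input_type_ids_sen, in_sentence, sentences, entire_ids, entire_type_ids = process_text(text_demo)

for i in range(1, len(entire_ids) - 1):          # skip [CLS] and the final [SEP]
    sent_id, pos = in_sentence[i]
    print(f"{i:2d}  {get_word(i):10s} -> sentence {sent_id}, position {pos}")
```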
\n", + " if gap < threshold:\n", + " suggestion = prob_ord[0][0]\n", + " suggestion_prob = prob_ord[0][1]\n", + " need = 1\n", + " else:\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " suggestion_prob = top_prob\n", + " need = 0\n", + " #print(\"gap = \" + str(gap))\n", + " #print(prob_ord)\n", + " else:\n", + " suggestion = tokenizer.ids_to_tokens[top_ind]\n", + " suggestion_prob = top_prob\n", + " need = 0\n", + " \n", + " return suggestion,need,suggestion_prob,top_of_alternative \n", + "\n", + "#返回变量5\n", + "#suggestion -> 最值得推荐的词\n", + "#need -> 是否需要可选词中的一个\n", + "#suggestion_prob ->最值得推荐的词的概率\n", + "#top_of_alternative -> 可选词中最值得推荐的\n", + "#suggestion,need,suggestion_prob,top_of_alternative = give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from spacy.lemmatizer import Lemmatizer\n", + "from spacy.lang.en import LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES\n", + "from pattern.en import comparative, superlative\n", + "from pattern.en import suggest\n", + "from nltk.stem.lancaster import LancasterStemmer\n", + "from nltk.stem.porter import PorterStemmer\n", + "from nltk.stem import SnowballStemmer\n", + "import enchant\n", + "d = enchant.Dict(\"en_US\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "def word_convert(word,new_word,Stemmer):\n", + " '''\n", + " 功能:\n", + " 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically\n", + " 输入:\n", + " word:需要变形的词\n", + " new_word:猜想的变形\n", + " 输出:\n", + " suggest_word:推荐的正确变形\n", + " '''\n", + " suggest_word = None\n", + " word_stem = Stemmer().stem(word)\n", + " suggest_ = new_word\n", + " \n", + " suggest_list = suggest(suggest_)\n", + "\n", + " if len(word) 0.95):# or word_[1] > 0.95 :\n", + " suggest_word = word_[0]\n", + " break \n", + " if word_[1] < 0.001:\n", + " break\n", + " stem_list = []\n", + " for stemmer in stemmers:\n", + " suggest_stem = stemmer.stem(word_[0])\n", + " if flag == 1 and suggest_stem[:-1] in word_stem and word_stem[:3] in suggest_stem[:3]: #一般是去后缀\n", + " suggest_word = word_[0]\n", + " break\n", + " elif flag == 0 and word_stem in suggest_stem and word_[0][-1:] in suggest_[-1:]: #一般是加后缀,后缀一定要一样\n", + " suggest_word = word_[0]\n", + " break\n", + " \n", + " if suggest_word != None:\n", + " break\n", + " return suggest_word \n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "def word_convert(word,new_word,Stemmer):\n", + " '''\n", + " 说明;\n", + " 与上面的区别是使用的拼写改错算法不同,上面那个平均速度慢,但更符合我的要求,这个平均速度更快\n", + " 功能:\n", + " 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically\n", + " 输入:\n", + " word:需要变形的词\n", + " new_word:猜想的变形\n", + " Stemmer:词根提取器\n", + " 输出:\n", + " suggest_word:推荐的正确变形\n", + " '''\n", + " if d.check(new_word)==True: #如果发现new_word拼写正确,则直接返回\n", + " return new_word\n", + " else:\n", + " suggest_word = None\n", + " word_stem = Stemmer().stem(word)\n", + " suggest_ = 
new_word\n", + " suggest_list = d.suggest(suggest_) #可能的正确单词列表\n", + "\n", + " if len(word)death,success->succeed无能为力'" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''下面是词性转换系列函数\n", + " 功能:\n", + " 词性转变系列函数\n", + " 输入:\n", + " word:原形词\n", + " 输出:\n", + " suggest_word:推荐的变形\n", + " suggest_list:推荐的变形列表\n", + " 说明:\n", + " 词性变化的能力有限,对于有些特殊变形,比如die->death,success->succeed无能为力'''" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "def adj_to_adv(word):\n", + " suggest_word = None\n", + " if(word == \"good\"):\n", + " return \"well\"\n", + " else:\n", + " suggest_ = word + 'ly'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " return suggest_word\n", + "#如果形容词副词同形,那么他会返回none,但是不影响计算,因为形容词副词同形啊\n", + "\n", + "\n", + "def adv_to_adj(word):\n", + " suggest_word = None\n", + " if(word == \"well\"):\n", + " return \"good\" \n", + " elif word[-2:] == 'ly':\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " return suggest_word\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "def adj_to_anything(word):#形容词变成其他词性\n", + " suggest_word = None\n", + " suggest_list = []\n", + " if word[-1:] == 'y': #举例 healthy->health\n", + " suggest_ = word[:-1]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ful':#举例 successful->success\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ive': #举例 active -> act\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ed': #举例 interested->interest->interesting\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " suggest_ = suggest_ + 'ing'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " \n", + " elif word[-3:] == 'ing':#举例 interesting->interest->interested\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " suggest_ = suggest_ + 'ed'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " \n", + " elif word[-4:] == 'less': #举例 careless -> care\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ly': #举例: friendly -> friend , lovely -> love\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " \n", + " elif word[-1:] == 't': #举例 different -> different\n", + " suggest_ = word[:-1]\n", + " suggest_ = suggest_ + 'ce'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if 
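Both `word_convert` variants above (part of the second one is garbled in this diff) follow the same check-then-filter recipe on top of `pyenchant`: `d.check(w)` says whether `w` is already a valid word, `d.suggest(w)` returns ranked corrections, and stem comparison keeps only corrections that are still derived from the original word. A simplified sketch of that recipe follows; the stemmer choice, prefix length and fallback are illustrative, not the notebook's exact rules.

```python
import enchant
from nltk.stem.porter import PorterStemmer

d = enchant.Dict("en_US")
stemmer = PorterStemmer()

def correct_form(base_word, guessed_form):
    """If the guessed derivation is a real word, keep it; otherwise fall back to
    the spell checker's suggestions that share a stem with the base word."""
    if d.check(guessed_form):
        return guessed_form
    base_stem = stemmer.stem(base_word)
    for candidate in d.suggest(guessed_form):
        if stemmer.stem(candidate).startswith(base_stem[:3]):
            return candidate
    return None

# e.g. correct_form("basic", "basicly") -> "basically" on a typical en_US dictionary
```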
suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ous': #举例 dangerous -> danger\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'al': #举例 original -> origin\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-4:] == 'able':\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'en': #举例 woolen -> wool\n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-2:] == 'ic': \n", + " suggest_ = word + 'al'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " suggest_ = word[:-2]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " elif word[-3:] == 'ish':\n", + " suggest_ = word[:-3]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'and'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer) \n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ese':\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'a'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer) \n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " elif word[-3:] == 'ian':\n", + " suggest_ = word[:-1]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = word[:-3]\n", + " suggest_ = suggest_ + 'y'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " if suggest_word == None:\n", + " HouZhui_list = ['ment','ness','tion','ture','sion','ty','y','tive','sive']\n", + " for HouZhui in HouZhui_list:\n", + " suggest_ = word + HouZhui\n", + " new_word = word_convert(word,suggest_,PorterStemmer)\n", + " if new_word != None:\n", + " suggest_word = new_word\n", + " suggest_list.append(suggest_word)\n", + " suggest_list = list(set(suggest_list)) \n", + " return suggest_list\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [], + "source": [ + "def N_to_anything(word):#名词变成其他词性\n", + " suggest_list = []\n", + " list_HouZhui = ['y','ful','tive','sive','ed','ing','less','ly','ous','al','able','en','tic','ish','ance','er','or']\n", + " list_QianZhui = ['a']\n", + " if word[-4:] in ['ment','ness','tion','ture','sion','tive','sive']:\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " else:\n", + " for HouZhui in list_HouZhui:\n", + " suggest_ = word + HouZhui\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " for QianZhui in 
list_QianZhui:\n", + " suggest_ = QianZhui + word\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " if word[-2:] == 'ce':\n", + " suggest_ = word[:-2]\n", + " suggest_ = suggest_ + 't'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " elif word[-4:] == 'land':\n", + " suggest_ = word[:-4]\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word == None:\n", + " suggest_ = suggest_ + 'lish'\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word) \n", + " #print(suggest_list)\n", + " suggest_list = list(set(suggest_list))\n", + " return suggest_list\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [], + "source": [ + "def V_to_anything(word):#动词变成其他词性\n", + " suggest_word = None\n", + " suggest_list = []\n", + "\n", + " HouZhui_list = ['ful','tive','sive','ed','less','ly','ous','al','able','en','tic','ish','ance','tion','sion','ment','er','or','ee']\n", + " for HouZhui in HouZhui_list:\n", + " suggest_ = word + HouZhui\n", + " suggest_word = word_convert(word,suggest_,PorterStemmer)\n", + " if suggest_word != None:\n", + " suggest_list.append(suggest_word)\n", + " suggest_list = list(set(suggest_list))\n", + " return suggest_list\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [], + "source": [ + "'''\n", + " 功能:\n", + " 生成形容词,副词关联词表\n", + " 输入:\n", + " word:形容词/副词\n", + " 输出:\n", + " list_word:为没有添加词的其他形式,包括三音节以下词的比较级最高级\n", + " list_word2:为三音节及以上的词的比较级最高级,如果输入形容词比较级最高级没有more/most,该列表为空\n", + " 说明:\n", + " 由于三音节形容词/副词的比较级,最高级为more/most+原形容词/副词,所以特别把形容词/副词和其他词性变形区分出来\n", + "'''\n", + "\n", + "def build_like_word_adj(word): #创建类似形容词列表\n", + " list_word = []\n", + " list_word2 = [] #把比较级最高级带more的放在这里\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " word_er = comparative(i)\n", + " if \"more\" in word_er: #把比较级带more,most的词放在另一个列表list_word2\n", + " list_word2.append(word_er)\n", + " else:\n", + " list_word.append(word_er)\n", + " word_est = superlative(i)\n", + " if \"most\" in word_est:\n", + " list_word2.append(word_est)\n", + " else:\n", + " list_word.append(word_est)\n", + " word_adv = adj_to_adv(i)\n", + " if word_adv != None:\n", + " list_word.append(word_adv)\n", + " list_N = adj_to_anything(word)\n", + " for N in list_N:\n", + " list_word.append(N)\n", + " \n", + " list_word = list(set(list_word))\n", + " return list_word,list_word2\n", + "\n", + "def build_like_word_adv(word): #创建类似形容词列表\n", + " list_word = []\n", + " list_word2 = []\n", + " list_special = ['however','seldom','often','never','otherwise']\n", + " if word in list_special:\n", + " list_word = [word]\n", + " list_word2 = []\n", + " else:\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " word_er = comparative(i)\n", + " if \"more\" in word_er:\n", + " list_word2.append(word_er)\n", + " else:\n", + " list_word.append(word_er)\n", + " word_est = superlative(i)\n", + " if \"most\" in word_est:\n", + " list_word2.append(word_est)\n", + " else:\n", + " list_word.append(word_est)\n", + " word_adv = adv_to_adj(i)\n", + " if word_adv != None:\n", + " 
list_word.append(word_adv)\n", + " list_word = list(set(list_word))\n", + " return list_word,list_word2\n" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [], + "source": [ + "'''\n", + " 功能:\n", + " 根据检查的位置整理出放入BERT模型的input_ids,input_type_ids以及检查位置在input_ids中的下标位置\n", + " pre_training_input_in_sentence得到检查位置所在句子的信息\n", + " pre_training_input_entire得到检查位置在完整text中的信息\n", + " 输入:\n", + " index:在完整text中的位置\n", + " 输出:\n", + " word:该下标下的单词\n", + " input_ids:tokens的对应字典id列表\n", + " input_type_ids:零列表\n", + " id_in_sen:检查位置在句子中的下标(pre_training_input_in_sentence的返回)\n", + " index:检查位置在完整text中的下标,其实就是输入的下标\n", + "'''\n", + "def pre_training_input_in_sentence(index): \n", + " sentence_id = in_sentence[index][0]\n", + " id_in_sen = in_sentence[index][1]\n", + " word = input_ids_sen[sentence_id][id_in_sen]\n", + " word = tokenizer.ids_to_tokens[word]\n", + " input_ids = copy.deepcopy(input_ids_sen[sentence_id])\n", + " input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id])\n", + "\n", + " return word,input_ids,input_type_ids,id_in_sen\n", + "\n", + "def pre_training_input_entire(index): \n", + " word = entire_ids[index]\n", + " word = tokenizer.ids_to_tokens[word]\n", + " input_ids = copy.deepcopy(entire_ids)\n", + " input_type_ids = copy.deepcopy(entire_type_ids)\n", + "\n", + " return word,input_ids,input_type_ids,index\n", + "\n", + "#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 102]\n", + "#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 1045, 2018, 1037, 2200, 2204, 2835, 1012, 1996, 2377, 2001, 2200, 5875, 1012, 102]" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [], + "source": [ + "import math\n", + "from pattern import en\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "\n", + "'''\n", + " 功能:\n", + " 1.judge_and_suggestion系列函数,这个系列函数是在analyse之前做的一个预先判断处理,判断的是该位置原来词的相关词中有没有可以代替它的词\n", + " 2.当相关词中有词的可能性和原词的可能性的差距大于阈值,则认为原词是错的,可以用相关词替换\n", + " 3.替换词的gap还要经过后续的检查才能决定他是不是最好的推荐,这一步骤放在了show_abnormals里\n", + " 输入:\n", + " prob:该位置可能性列表\n", + " original:该位置原先的词\n", + " list_word:该位置相关词表\n", + " threhold:门槛,也就是阈值\n", + " 输出:\n", + " judge:判断原来的词是否正确,0表示需要换词,1表示不需要换词或者说相关词里面没一个合适的\n", + " suggestion:相关词中最好的推荐\n", + " gap_with_totally_top:备选词中概率最高的和所有词中概率最高的之间的gap,可以换的词也有可能因为gap太大而遭到拒绝\n", + "'''\n", + "def judge_and_suggestion(prob,original,list_word,threhold):\n", + " top_prob = 0\n", + " list_word = list_word + [original]\n", + " original_prob = prob[tokenizer.vocab[original]]\n", + " best = None\n", + " suggestion = None\n", + " for word in list_word:\n", + " try:\n", + " word_id = tokenizer.vocab[word]\n", + " prob_word = prob[word_id]\n", + " if prob_word > top_prob:\n", + " top_prob = prob_word\n", + " best_word = word\n", + " except KeyError:#有的词enchant认为是正确的拼写,bert的词典里却没有,比如tiring,这种情况暂时没法解决,但是实际上bert不认的词会自动分词\n", + " pass\n", + "\n", + " totally_top = prob.max().item() #最高的概率(不需要知道概率最大的词是哪一个)\n", + " gap_with_origin = math.log(top_prob) - math.log(original_prob) #备选词中最大概率和原来的词的概率的差\n", + " gap_with_totally_top = math.log(totally_top) - math.log(top_prob) #所有词中最高的概率和备选词中最高的概率的差\n", + " \n", + " if gap_with_origin > threhold:\n", + " suggestion = best_word\n", + " return 0,suggestion,gap_with_totally_top\n", + " else:\n", + " return 1,suggestion,gap_with_totally_top\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + 
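The candidate lists that `build_like_word_adj` / `build_like_word_adv` assemble come straight from `pattern.en` and spaCy's lemmatizer: inflections via `lexeme`/`conjugate`, comparison forms via `comparative`/`superlative`, with "more/most" forms routed into the separate `list_word2`. A few representative calls make those lists concrete; the outputs in the comments are the usual `pattern.en` results and are shown for illustration, assuming the same `pattern.en` install the notebook already imports.

```python
from pattern.en import lexeme, conjugate, comparative, superlative, PRESENT, PAST

print(lexeme("give"))                               # ['give', 'gives', 'giving', 'gave', 'given']
print(conjugate("gave", tense=PRESENT, person=1))   # 'give'
print(conjugate("give", tense=PAST))                # 'gave'
print(comparative("big"), superlative("big"))       # bigger biggest
print(comparative("useful"))                        # 'more useful' -> goes to list_word2 above
print(lexeme("be")[:8])                             # the be-forms with negations sliced off, as in analyse_V below
```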
"'分析各种词性系列函数\\n 功能:对第一遍检查得出的有问题的位置的单词,根据不同的词性进行不同步骤的分析\\n 输入:\\n index:在原文中的错误位置\\n prob:该位置可能性列表\\n gap:原文该位置的词和概率最高的词之间的gap\\n top_word:概率最高的词\\n threshold:免检查门槛\\n threshold2:免修正门槛(勉强不算错)\\n threshold3:用推荐词替换的最低要求,大于该阈值才可以替换\\n 输出:\\n suggestion:给出的修改建议,修改建议不局限于错误位置\\n 说明:\\n 不仅局限于错误位置的分析是通过预添加或者去掉一个token,多进行一次model计算\\n'" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "'''分析各种词性系列函数\n", + " 功能:对第一遍检查得出的有问题的位置的单词,根据不同的词性进行不同步骤的分析\n", + " 输入:\n", + " index:在原文中的错误位置\n", + " prob:该位置可能性列表\n", + " gap:原文该位置的词和概率最高的词之间的gap\n", + " top_word:概率最高的词\n", + " threshold:免检查门槛\n", + " threshold2:免修正门槛(勉强不算错)\n", + " threshold3:用推荐词替换的最低要求,大于该阈值才可以替换\n", + " 输出:\n", + " suggestion:给出的修改建议,修改建议不局限于错误位置\n", + " 说明:\n", + " 不仅局限于错误位置的分析是通过预添加或者去掉一个token,多进行一次model计算\n", + "'''" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "'''\n", + " 这是一个相关代词的词典,容易混淆的词放在一个列表中\n", + "\n", + "'''\n", + "like_he = ['he','his','him','himself','who', 'whom', 'whose']\n", + "like_she = ['she','her','herself','hers','who', 'whom', 'whose']\n", + "like_it = ['it','its','itself','who', 'whom', 'whose']\n", + "like_i = ['i','me','my','myself','mine']\n", + "like_you = ['you','your','yourself','yourselves']\n", + "like_we = ['we','us','our','ours','ourselves']\n", + "like_they = ['they','them','their','theirs']\n", + "\n", + "like_this = ['this', 'these'] \n", + "like_that = ['that','those'] \n", + "pronoun_Question = ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'] #疑问代词\n", + "pronoun_relation = ['that', 'which', 'who', 'whom', 'whose', 'as'] #关系代词\n", + "like_some = ['some','any']\n", + "like_few = ['few','little']\n", + "like_many = ['many','much']\n", + "like_other = ['another','other']\n", + "\n", + "pronoun = [like_he,like_she,like_it,like_i,like_you,like_we,like_they,like_this,like_that,pronoun_Question,pronoun_relation,like_some,like_few,like_many,like_other]\n", + "pronoun_dictionary = {}\n", + "pronoun_list = []\n", + "for list_word in pronoun:\n", + " pronoun_list = pronoun_list + list_word\n", + " for word in list_word:\n", + " pronoun_dictionary.update({word:list_word})" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "import copy\n", + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE\n", + "\n", + "def analyse_V(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + "#这是一个处理动词语法问题的函数,输入为问题词在text的token中的下标index\n", + " if gap < threshold:\n", + " return None\n", + " #******************************top_word暗示我应该是不定式**************************\n", + " if top_word in [\"to\",\"for\"]:\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab['to'])\n", + " input_type_ids.append(0)\n", + " list_word = [conjugate(verb=wordV,tense=PRESENT,person = 1)]\n", + " suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) \n", + " if need == 1:\n", + " return 'to ' + suggestion \n", + " \n", + " #*****************************判断是不是时态或者拼写错误,又或者是其他词性********\n", + " wordV = get_word(index)\n", + " #这三种是不涉及位置变化的检查,根据生成词表的速度从快到慢依次检查,之后也不需要再生成词表\n", + "\n", + " list_V = lexeme(wordV)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_V,threshold3)\n", + " if judge==0 
and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + " \n", + " list_others = V_to_anything(conjugate(verb=wordV,tense=PRESENT,person = 1))\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_others,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " list_spell_correct = d.suggest(wordV)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_spell_correct,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " \n", + " front_word = get_word(index - 1)\n", + " behind_word = get_word(index + 1)\n", + " #**************************************判断是不是缺介词***************************\n", + " list_IN = [\"to\",\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\"]\n", + " if behind_word not in list_IN:\n", + " print(\"检查点\")\n", + " wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen + 1,tokenizer.vocab['at'])#就随便插入一个东西,占位子\n", + " input_type_ids.append(0)\n", + " suggestion_IN,need_IN,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_IN,2)\n", + " if need_IN == 1:\n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[suggestion_IN]\n", + " list_word = list_V\n", + " suggestion_V,need,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,5)\n", + " if need == 1:\n", + " suggestion = suggestion_V + ' ' + suggestion_IN\n", + " return suggestion\n", + " \n", + " need_to_will = need_be = 0\n", + " \n", + " #**************************************判断是不是不定式或者将来时*************************** \n", + " if front_word not in [\"to\",\"will\"]:\n", + " wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen,tokenizer.vocab['to'])#就随便插入一个东西,占位子\n", + " input_type_ids.append(0)\n", + " try:\n", + " input_ids[id_in_sen + 1] = tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,person = 1)]\n", + " suggestion_to_will,need_to_will,prob0,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,[\"to\",\"will\"],1)\n", + " except KeyError:\n", + " need_to_will = 0\n", + " #**************************************判断是不是被动语态或者进行时******************* \n", + " list_be = lexeme('be')\n", + " list_be = lexeme('be')[:8] #把否定去掉 \n", + " #********************是不是被动语态**************** \n", + "\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab['be'])#就随便插入一个东西,占位子\n", + " input_type_ids.append(0)\n", + " try:\n", + " input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PAST,aspect=PROGRESSIVE)]\n", + " suggestion1,need_be1,prob1,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1)\n", + " except KeyError:\n", + " need_be1 = 0\n", + " \n", + " #********************是不是现在分词**************** \n", + " try:\n", + " input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " suggestion2,need_be2,prob2,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " except KeyError:\n", + " need_be2 = 0\n", + 
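+    "    # NOTE: the three probes above each re-scored the sentence with one extra token\n",
+    "    # inserted in front of the verb: prob0 tried \"to\"/\"will\" (infinitive or future),\n",
+    "    # prob1 tried a form of \"be\" + the past participle (passive voice), and prob2\n",
+    "    # tried \"be\" + the present participle (progressive). The block below keeps\n",
+    "    # whichever probe scored highest and builds the final suggestion from it.\n",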
"\n", + " #***************************选择是不定式还是被动语态还是进行时****************************\n", + " prob_max = 0\n", + " if need_to_will == 1:\n", + " prob_max = max(prob_max,prob0)\n", + " if need_be1 == 1:\n", + " prob_max = max(prob_max,prob1)\n", + " if need_be2 == 1:\n", + " prob_max = max(prob_max,prob2)\n", + "\n", + " if need_to_will == 1 and prob_max == prob0:\n", + " need_be = 0\n", + " if need_be1 == 1 and prob_max == prob1:\n", + " need_to_will = 0\n", + " need_be = 1\n", + " be_ = suggestion1\n", + " if need_be2 == 1 and prob_max == prob2:\n", + " need_to_will = 0\n", + " need_be = 1\n", + " be_ = suggestion2\n", + " #*************************************************处理各种语法******************************************************************\n", + " if need_to_will == 1:\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion_to_will])\n", + " input_type_ids.append(0)\n", + " list_word = [conjugate(verb=wordV,tense=PRESENT,person = 1),conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)]\n", + " suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5)\n", + " if need == 1:\n", + " return 'to ' + suggestion\n", + " else:\n", + " return top_word\n", + "\n", + " elif need_be == 1:\n", + " #********************************被动语态或者进行时*****************\n", + " wordV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[be_])\n", + " input_type_ids.append(0)\n", + " list_word = lexeme(wordV)\n", + " suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5)\n", + " if need == 1:\n", + " return be_ + ' '+ suggestion\n", + " else:\n", + " return top_word\n", + " else:\n", + " return top_word\n", + " \n", + " return suggestion\n", + " \n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "def analyse_adj(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None\n", + " wordADJ = get_word(index)\n", + " #*****************************判断是不是时态或者拼写错误,又或者是其他词性********\n", + " list_word,list_word2 = build_like_word_adj(wordADJ)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADJ,list_word,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " list_spell_correct = d.suggest(wordADJ)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADJ,list_spell_correct,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + " \n", + " #list_word = list_word + list_spell_correct\n", + " front_word = get_word(index - 1)\n", + " behind_word = get_word(index + 1)\n", + " if front_word in ['more','most'] and len(list_word2) == 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级不需要加more/most,但是前面有more/most\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " del input_ids[id_in_sen - 1]\n", + " del input_type_ids[0]\n", + " suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,min(threshold2, gap - threshold3))\n", + " return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3\n", + " \n", + " elif behind_word in ['##er','##r'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " 
wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen] = tokenizer.vocab['more']\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_word,min(threshold2, gap - threshold3))\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'more' + ' ' + suggestion5 \n", + " \n", + " elif behind_word in ['##est','##st'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen] = tokenizer.vocab['most']\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_word,min(threshold2, gap - threshold3))\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'most' + ' ' + suggestion5 \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " \n", + " if front_word not in ['this','that','these','those','more','most']:#检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级抑或是be动词\n", + " wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " list_front = ['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] \n", + " suggestion,need_front,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_front,2)\n", + " if need_front == 1:\n", + " wordADJ,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,min(threshold2, gap - threshold3)) \n", + " if need == 1:\n", + " return suggestion + ' ' + suggestion2\n", + " else:\n", + " return top_word\n", + " \n", + " return top_word\n" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_adv(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None\n", + " \n", + " wordADV = get_word(index)\n", + " if wordADV in ['not']:\n", + " return None\n", + " #*****************************判断是不是时态或者拼写错误,又或者是其他词性********\n", + " \n", + " list_word,list_word2 = build_like_word_adv(wordADV)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADV,list_word,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " list_spell_correct = d.suggest(wordADV)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADV,list_spell_correct,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + "\n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " \n", + " #list_word = list_word + list_spell_correct\n", + " if get_word(index - 1) in ['more','most'] and len(list_word2) == 0:\n", + " #判断是不是比较级使用错误,这个if语句处理:该形容词比较级/最高级不需要加more/most,但是前面有more/most \n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " del input_ids[id_in_sen - 1]\n", + " del input_type_ids[0]\n", + " suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,5)\n", + " return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3\n", + " \n", + " elif 
get_word(index + 1) in ['##er','##r'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen] = tokenizer.vocab['more']\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,5)\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'more' + ' ' + suggestion5 \n", + " \n", + " elif get_word(index + 1) in ['##est','##st'] and len(list_word2) != 0:\n", + " #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) \n", + " input_ids[id_in_sen] = tokenizer.vocab['most']\n", + " suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,5)\n", + " return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'most' + ' ' + suggestion5 \n", + "\n", + " else:\n", + " #检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级,be动词\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " list_front = ['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] \n", + " suggestion,need_front,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_front,2)\n", + " if need_front == 1:\n", + " wordADV,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " #print(tokenizer.convert_ids_to_tokens(input_ids))\n", + " suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) \n", + " if need == 1:\n", + " return suggestion + ' ' + suggestion2\n", + " else:\n", + " return top_word\n", + " else:\n", + " wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + " input_ids.insert(id_in_sen + 1,tokenizer.vocab[\",\"])\n", + " input_type_ids.append(0)\n", + " suggestion3,need_douhao,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,2)\n", + " if need_douhao == 1:\n", + " return suggestion3 + ' ,'\n", + " else:\n", + " return top_word\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['grandparents', 'grandpas']" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "N_to_anything(\"grandpa\")" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "from pattern.en import article,referenced,pluralize, singularize\n", + "import nltk\n", + "def analyse_N(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " #这是一个处理名词语法问题的函数,输入为问题词在text的token中的下标index\n", + " if gap < threshold:\n", + " return None\n", + " \n", + " wordN = get_word(index)\n", + " #*****************************判断是不是时态或者拼写错误,又或者是其他词性********\n", + " word_tag = nltk.pos_tag([wordN])\n", + " if word_tag[0][1] == \"NN\":\n", + " N_ = wordN\n", + " N_s= pluralize(wordN)\n", + " else:\n", + " N_ = singularize(wordN)\n", + " N_s= wordN\n", + " list_N = [N_,N_s]\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_N,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + " \n", + " list_others = N_to_anything(N_)\n", + 
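+    "    # NOTE: candidate sets are screened from cheapest to most expensive -- the number\n",
+    "    # inflections above, the related forms just computed by N_to_anything, then the\n",
+    "    # enchant spelling suggestions below -- each through judge_and_suggestion with the\n",
+    "    # same thresholds. Only if none of them fires does the function go on to test\n",
+    "    # whether an article or preposition is missing in front of the noun.\n",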
" judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_others,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " list_spell_correct = d.suggest(wordN)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_spell_correct,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion\n", + "\n", + " #***********************************************************************************************************************************\n", + " need_DT = 0 #表示是否需要在前面加冠词 \n", + " wordN,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index)\n", + "\n", + " #*****************************************判断是否需要冠词或介词************************************************************************ \n", + " list_DT = ['the','a','an']\n", + " front_word = get_word(index - 1)\n", + " if front_word in list_DT:#如果前一个词就是冠词,那么一定不需要再往前面加介词或冠词\n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return top_word\n", + " \n", + " input_ids.insert(id_in_sen,tokenizer.vocab[\"[MASK]\"])\n", + " input_type_ids.append(0)\n", + " list_IN = [\"of\",'to',\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\"]\n", + " list_DT_IN = list_DT + list_IN\n", + " suggestion,need_DT_IN,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_DT_IN,2)\n", + " if need_DT_IN == 0:#不需要冠词或介词\n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return top_word\n", + " \n", + " elif need_DT_IN == 1:#需要冠词或介词\n", + " wordN,input_ids,input_type_ids,index = pre_training_input_entire(index)\n", + " input_ids.insert(index,tokenizer.vocab[suggestion])\n", + " input_type_ids.append(0)\n", + " suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_N ,min(9.5,gap - threshold3))\n", + " if need == 1:\n", + " return suggestion + ' ' + suggestion2\n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return top_word\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_pronoun(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " #这是一个处理代词语法问题的函数,输入为问题词在text的token中的下标index\n", + " if gap < threshold:\n", + " return None\n", + " \n", + " wordPROP = get_word(index)\n", + " #*****************************判断是不是时态或者拼写错误,又或者是其他代词********\n", + " try:\n", + " list_PROP = pronoun_dictionary[wordPROP]\n", + " except:\n", + " list_PROP = []\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordPROP,list_PROP,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + "\n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordPROP,pronoun_list,threshold3)#在所有代词里面选择\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " else:\n", + " return None\n" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [], + "source": [ + "def 
analyse_DT(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None \n", + " \n", + " wordDT = get_word(index)\n", + " if wordDT in [\"every\",'per','each','no']:#有实际意义,不做修改\n", + " return None\n", + "\n", + " if wordDT in ['some']:\n", + " list_word = ['some','any','a','an']\n", + " elif wordDT in ['any']:\n", + " list_word = ['some','any',\"every\",'per','each']\n", + " elif wordDT in ['this','that','these','those']:\n", + " list_word = ['this','that','these','those']\n", + " elif wordDT in ['the','a','an']:\n", + " list_word = ['the','a','an','some','any']\n", + " elif wordDT in ['another','other']:\n", + " list_word = ['another','other']\n", + " elif wordDT in ['all','both']:\n", + " list_word = ['all','both']\n", + " else:\n", + " list_word = [wordDT]\n", + " \n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordDT,list_word,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " \n", + " elif top_word in [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\",'to']:\n", + " return top_word + ' ' + wordDT\n", + " else:\n", + " if top_word in ['some','any','this','that','these','those','the','a','an']:\n", + " return top_word\n", + " elif wordDT in ['another','other','all','both']:\n", + " return None\n", + " else:\n", + " return \"去掉 \" + wordDT\n", + "#print(analyse_DT(77))" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_IN(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " #检查介词,确认需不需要删掉或者换介词\n", + " if gap < threshold:\n", + " return None \n", + " \n", + " wordIN = get_word(index)\n", + " if wordIN in ['before',\"after\",\"above\",\"below\",\"underneath\",\"beneath\",\"without\"]:#有实际意义,不做修改\n", + " return None\n", + " list_word = [\"at\",\"in\",\"on\",\"by\",\"for\",\"from\",\"with\",\"about\",\"against\",\"along\",\"among\",\"around\",\"as\",\"before\",\"behind\",\"below\",\"beside\",\"between\",\"during\",\"besides\",\"into\",\"near\",\"over\",\"through\",\"under\",\"without\",\"after\",\"above\",\"of\",'to']\n", + " \n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordIN,list_word,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " list_spell_correct = d.suggest(wordIN)\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordIN,list_spell_correct,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " elif top_word in u',.!?[]()<>\"\\'':\n", + " return top_word\n", + " else:\n", + " return \"去掉 \" + wordIN\n", + "#print(analyse_IN(76))" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_CC(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None \n", + " \n", + " wordCC = get_word(index)\n", + " list_CC = 
[\"but\",\"because\",\"yet\",\"still\",\"however\",\"although\",\"so\",\"thus\",\"and\",\"or\",\"too\",\"either\",\"or\",\"neither\",\"nor\",\"when\",\"while\",\"as\",\"whenever\",\"since\",\"until\",\"till\",\",\"]\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordCC,list_CC,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return None\n" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_MD(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None \n", + " \n", + " wordMD = get_word(index)\n", + " if wordMD in ['can','could']:\n", + " list_MD = ['can','could']\n", + " elif wordMD in ['may','might']:\n", + " list_MD = ['may','might']\n", + " elif wordMD in ['shall','should']:\n", + " list_MD = ['shall','should'] \n", + " elif wordMD in ['will','would']:\n", + " list_MD = ['will','would'] \n", + " elif wordMD in ['dare','dared']:\n", + " list_MD = ['dare','dared'] \n", + " else:\n", + " list_MD = [wordMD]\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordMD,list_MD,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return None" + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_biaodian(index,prob,gap,top_word,threshold,threshold2,threshold3):\n", + " if gap < threshold:\n", + " return None \n", + " \n", + " biaodian = get_word(index) \n", + " biaodian_list = ['.',',',';','!','?','\"',\"'\",',','。','’','‘','“','”','and','but']\n", + " judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,biaodian,biaodian_list,threshold3)\n", + " if judge==0 and gap_with_totally_top < threshold2:\n", + " return suggestion \n", + " \n", + " if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求\n", + " return None\n", + " else:\n", + " return None" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "'''\n", + " 功能:\n", + " 这是几个和拼写检查相关函数\n", + " correct_spelling:用于发现text中拼写错误,写成不存在的词的情况,并暂时把它改成存在的词,这样再放入模型训练,完成之后的步骤\n", + " token_Align:展示拼写错误时需要将原来错误的词显示出来,由于BERT的tokenize会把错误的词分段,造成未知序号的混乱,因而需要将原来的token和被correct的token位置对齐\n", + " 这两个函数需要配合使用\n", + "'''\n", + "import enchant\n", + "import re\n", + "d = enchant.Dict(\"en_US\")\n", + "from pattern.en import suggest\n", + "\n", + "def C_trans_to_E(string): #标点符号转换函数\n", + " E_pun = u',.!?[]()<>\"\\'\"\\'.:;'\n", + " C_pun = u',。!?【】()《》“‘”’.:'\n", + " table= {ord(f):ord(t) for f,t in zip(C_pun,E_pun)}\n", + " return string.translate(table)\n", + "\n", + "def process_biaodian(text):#把标点和字母分开,使得用split分词能把标点分成单独的token,顺便把中文标点变成英文标点\n", + " text1 = ''\n", + " for character in text[0]: \n", + " if character in u',.!?[]()<>\"\\':-;,。!?【】()《》“‘”’.%':\n", + " character1 = C_trans_to_E(character)\n", + " text1 = text1 + ' '+character1+' '\n", + " else:\n", + " text1 = text1 + character \n", + " return [text1]\n", + "\n", + "def correct_spelling(text):\n", + " #text:原本可能带有拼写错误的文本\n", + " #返回[correct_text]:不带拼写错误的文本,外面套上中括号,保持列表的形式\n", + " global suggestions\n", + " correct_text = ''\n", + " text0 = text\n", + " text1 = ''\n", + " \n", + " tokens = text.split(' 
')\n", + " for token in tokens: #给拼写错误的单词标上‘错’\n", + " if token not in ['.',',',';','!','?','\"',\"'\",',','。','’','‘','“','”',\"\\r\\n\",\"\"]:\n", + " if d.check(token)==False and token != suggest(token)[0][0]:\n", + " word = '不' + suggest(token)[0][0] #pattern的suggestion \n", + " else:\n", + " word = token\n", + " elif token == \"\\r\\n\":\n", + " word = '换'\n", + " else:\n", + " word = token\n", + " correct_text = correct_text + ' ' + word\n", + " tokens = tokenizer.tokenize(correct_text) \n", + " length = len(tokens)\n", + " correct_text = \"\"\n", + " i = 0\n", + " while(i < length):\n", + "\n", + " if tokens[i] == '不':#中文乱码\n", + " suggestions.update({i+1:tokens[i+1]})#给外部变量suggestions添加错误\n", + " del tokens[i]\n", + " length = length - 1\n", + " elif tokens[i][0:2] == '##':\n", + " word = tokens[i][2:]\n", + " correct_text = correct_text + word \n", + " i = i+1\n", + " else:\n", + " token = tokens[i]\n", + " if token not in [\"'\"]:\n", + " word = ' '+ token\n", + " else:\n", + " word = token\n", + " \n", + " correct_text = correct_text + word \n", + " i = i+1\n", + " return [correct_text]\n", + "\n", + "\n", + "def token_Align(tokens,text): \n", + " #tokens是拼写修正之后的文本的分词结果\n", + " #text是原本可能带有拼写错误的文本\n", + " #返回的是text的分词结果\n", + " original_tokens = tokenizer.tokenize(text)\n", + " original_tokens = ['[CLS]'] + original_tokens + ['[SEP]']\n", + " print(original_tokens)\n", + " length = len(tokens)\n", + " i = 0\n", + " while(i < min(length - 1,len(original_tokens) - 1)):\n", + " if original_tokens[i] == tokens[i] or original_tokens[i+1] == tokens[i+1] or original_tokens[i+2] == tokens[i+2] or original_tokens[i+3] == tokens[i+3]:\n", + " i = i+1\n", + " continue\n", + " else:\n", + " if original_tokens[i][:2] == \"##\":\n", + " original_tokens[i-1] = original_tokens[i-1] + original_tokens[i][2:]\n", + " del original_tokens[i]\n", + " elif original_tokens[i+1][:2] == \"##\":\n", + " original_tokens[i] = original_tokens[i] + original_tokens[i+1][2:]\n", + " del original_tokens[i+1] \n", + " elif tokens[i] == '[UNK]':\n", + " original_tokens.insert(i,'[UNK]')\n", + " else:\n", + " if original_tokens[i+1] == tokens[i] or original_tokens[i+2] == tokens[i+1] or original_tokens[i+3] == tokens[i+2]:\n", + " if re.match(r'[a-z]',original_tokens[i]) == None :\n", + " original_tokens[i] = original_tokens[i] + original_tokens[i+1]\n", + " del original_tokens[i+1] \n", + " elif original_tokens[i] == tokens[i+1] or original_tokens[i+1] == tokens[i+2] or original_tokens[i+2] == tokens[i+3]:\n", + " original_tokens.insert(i,' ')\n", + " i = i + 1\n", + " \n", + " return original_tokens\n", + "\n", + "def split_text(text0,threshold1,threshold2):\n", + " #把文章分成一定长度的文段,保证GPU可以正常使用以及BERT模型不会超过最大的embeding\n", + " #当计数大于threshold1并且达到句尾时,将文本分开\n", + " #当计数大于threshold2并且达到分段位置时,将文本分开\n", + " #我们希望尽量能按照段落分,因此threshold2要比threshold1稍小一些\n", + " texts = []\n", + " text = ''\n", + " tokens = text0[0].split(' ')\n", + " count_tokens = 0\n", + " last_HuanHang = -1\n", + " new_tokens = []\n", + " for token in tokens:\n", + " if token == '':\n", + " continue\n", + " count_tokens = count_tokens + 1\n", + " text = text + ' '+ token\n", + " if (token == '.'and count_tokens > threshold1) or (token == '\\r\\n' and count_tokens > threshold2):\n", + " texts.append([text])\n", + " text = ''\n", + " count_tokens = 0\n", + " if count_tokens > 0: \n", + " texts.append([text]) \n", + " return texts" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + 
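# --- Illustrative sketch (not part of the original notebook) -------------------------
# correct_spelling above combines two libraries: enchant decides whether a token is a
# real English word, and pattern.en.suggest proposes the most likely respelling; the
# position of the original token is remembered so the faulty spelling can still be
# shown to the user. The helper below is a stripped-down restatement of that idea
# (the real function additionally marks repaired tokens with a sentinel and
# re-tokenizes the text for BERT).
import enchant
from pattern.en import suggest

d_demo = enchant.Dict("en_US")

def repair_spelling(tokens):
    fixes = {}                                   # position -> original misspelled token
    repaired = []
    for i, tok in enumerate(tokens):
        if tok.isalpha() and not d_demo.check(tok):
            fixes[i] = tok
            tok = suggest(tok)[0][0]             # best-ranked respelling from pattern
        repaired.append(tok)
    return repaired, fixes

# With pattern's ranking, repair_spelling("I arive at home .".split()) turns "arive"
# into "drive", which matches the cell output shown just below.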
"[' i drive at home .']" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = \"I arive at home.\"\n", + "correct_spelling(text)" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['[CLS]', 'that', 'will', 'generate', 'the', 'ur', '##l', 'string', '/', 'about', '?', 'name', '=', 'ze', '##it', ',', 'you', 'can', 'use', 'every', 'property', 'as', 'defined', 'in', 'the', 'node', '.', 'j', '##s', 'ur', '##l', 'module', 'documentation', '.', '[SEP]']\n", + "[CLS] [CLS]\n", + "that that\n", + "will will\n", + "generate generate\n", + "the the\n", + "curl url\n", + "string string\n", + "about /about\n", + "? ?\n", + "name name\n", + "= =\n", + "ze ze\n", + "##st ##it\n", + ", ,\n", + "you you\n", + "can can\n", + "use use\n", + "every every\n", + "property property\n", + "as as\n", + "defined defined\n", + "in in\n", + "the the\n", + "node node\n", + ". .\n", + "is js\n", + "curl url\n", + "module module\n", + "documentation documentation\n", + ". .\n", + "[SEP] [SEP]\n" + ] + }, + { + "data": { + "text/plain": [ + "[[' that will generate the url string /about ? name = zeit , you can use every property as defined in the node .'],\n", + " [' js url module documentation .']]" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "text = [\"that will generate the url string /about ? name = zeit , you can use every property as defined in the node .js url module documentation .\"]\n", + "text = process_biaodian(text)\n", + "new_text = correct_spelling(text[0])\n", + "tokens = tokenizer.tokenize(new_text[0])\n", + "tokens = ['[CLS]'] + tokens + ['[SEP]']\n", + "original_tokens = token_Align(tokens,text[0])\n", + "len_ = len(tokens)\n", + "for i in range(0,len_):\n", + " print(tokens[i],original_tokens[i])\n", + " \n", + "split_text(text,20,15)" + ] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": {}, + "outputs": [], + "source": [ + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "'''\n", + " 这是一个输出BERT模型训练结果的函数,方便查看调试\n", + "'''\n", + "def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): #输出结果的函数,要最高概率topk个输出\n", + " def print_pair(token, prob, end_str='', hit_mark=' '):\n", + " if i < firstk:\n", + " # token = token.replace('', '').replace('\\n', '/n')\n", + " print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str)\n", + " \n", + " ret = None\n", + " for i in range(len(tokens)):\n", + " ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]]\n", + " prob_ = probs[i][ind_].item() #这个probs是该字符串第i个位置上填上词典上各个词的概率,prob_是词典上原来天的这个词的概率\n", + " print_pair(tokens[i], prob_, end_str='\\t')\n", + " values, indices = probs[i].topk(topk)\n", + " #print(values, indices)\n", + " #print(\"****************************************************************************************************\")\n", + " top_pairs = []\n", + " for j in range(topk):\n", + " ind, prob = indices[j].item(), values[j].item()\n", + " hit_mark = '*' if ind == ind_ else ' '\n", + " token = tokenizer.ids_to_tokens[ind]\n", + " print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\\n')\n", + " top_pairs.append((token, prob))\n", + " if tokens[i] == \"[MASK]\":\n", + " ret = top_pairs\n", + " return ret " + ] + }, + { + "cell_type": "code", + "execution_count": 90, + 
"metadata": {}, + "outputs": [], + "source": [ + "def analyse_prob(prob,token):\n", + " ind_ = tokenizer.vocab[token]\n", + " prob_ = prob[ind_].item()\n", + " top_prob = prob.max().item()\n", + " top_ind = prob.argmax().item()\n", + " top_word = tokenizer.ids_to_tokens[top_ind] #可能性最高的词\n", + " gap = math.log(top_prob) - math.log(prob_) #计算两个词之间的差距 \n", + " return top_word,gap" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, + "outputs": [], + "source": [ + "import colored\n", + "from colored import stylize\n", + "import spacy\n", + "nlp = spacy.load('en')\n", + "from nltk.corpus import wordnet as wn\n", + "\n", + "def analyse_词性(token,tag):\n", + " if 'VB' in tag: #如果是动词的各种时态\n", + " tag0 = \"v\"\n", + " elif \"JJ\" in tag : #形容词\n", + " tag0 = \"a\"\n", + " elif \"RB\" in tag: #副词\n", + " tag0 = \"r\"\n", + " elif \"NN\" in tag: #名词\n", + " tag0 = \"n\"\n", + " else:\n", + " return tag\n", + " if wn.morphy(token, tag0)==None:\n", + " tag = nltk.pos_tag([token])[0][1]\n", + " return tag\n", + " \n", + "def show_abnormals(tokens,probs,text,show_suggestions=False): #多加了一个参数text,用来生成原来的token的\n", + " global suggestions\n", + " global original_tokens\n", + " original_tokens = token_Align(tokens,text)\n", + " def gap2color(mode):\n", + " if mode == 1:\n", + " return 'yellow_1'\n", + " elif mode == 2:\n", + " return 'orange_1'\n", + " else:\n", + " return 'red_1'\n", + " \n", + " def print_token(token, suggestion, gap ,mode):\n", + " if gap == 0 and mode == 1:\n", + " print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(token, colored.fg(gap2color(mode)) + colored.bg('black')), end='')\n", + " if show_suggestions and mode > 1:\n", + " print(stylize('/' + str(suggestion) + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='')\n", + " else:\n", + " print(stylize(' ', colored.fg(gap2color(mode)) + colored.bg('black')), end='')\n", + "\n", + " \n", + " avg_gap = 0.\n", + " tokens_tag = nltk.pos_tag(tokens) #给整个text做词性标注\n", + " for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP]\n", + " if tokens[i]=='[UNK]':\n", + " continue\n", + " top_word,gap = analyse_prob(probs[i],tokens[i])\n", + " print()\n", + " print(\"*******************************************************************************************************************\")\n", + " print(i)\n", + " print(gap)\n", + " avg_gap += gap\n", + " suggestion = None\n", + " tag = tokens_tag[i][1]#当前tokens的词性\n", + " tag = analyse_词性(tokens[i],tag)\n", + " print(tag)\n", + " \n", + " if 'VB' in tag: #如果是动词的各种时态\n", + " suggestion = analyse_V(i,probs[i],gap,top_word,2.5 ,8 ,1.8)\n", + " \n", + " elif \"DT\" == tag: #如果是冠词(冠词原则上不改变词性)\n", + " suggestion = analyse_DT(i,probs[i],gap,top_word,3 ,4 ,1)\n", + " \n", + " elif \"JJ\" in tag : #形容词\n", + " suggestion = analyse_adj(i,probs[i],gap,top_word,6 ,8 ,2)\n", + " \n", + " elif \"RB\" in tag: #副词\n", + " suggestion = analyse_adv(i,probs[i],gap,top_word,5 ,8 ,2)\n", + " \n", + " elif \"PRP\" in tag: #代词\n", + " suggestion = analyse_pronoun(i,probs[i],gap,top_word,4 ,5 ,1.5)\n", + " \n", + " elif \"NN\" in tag: #名词\n", + " suggestion = analyse_N(i,probs[i],gap,top_word,4 ,10 ,2.2)\n", + " \n", + " elif \"CC\" in tag: #连词\n", + " suggestion = analyse_CC(i,probs[i],gap,top_word,2 ,2.5 ,1.5)\n", + " \n", + " elif \"IN\" == tag or 'TO' == tag: #介词\n", + " suggestion = analyse_IN(i,probs[i],gap,top_word,3.5 ,4 ,1.5)\n", + " \n", + " elif 'MD' in tag: #情态动词\n", + " suggestion 
= analyse_MD(i,probs[i],gap,top_word,3 ,4 ,1.5)\n", + " \n", + " elif \"CD\" in tag: #数词直接pass\n", + " pass \n", + " \n", + " elif \"WDT\" == tag and gap > 3.5: #who,which,that那些\n", + " suggestion = top_word #推荐的词一般比较准\n", + " \n", + " elif tokens[i] in u',.!?[]()<>\"\\':,。!?【】()《》“‘”’.':\n", + " suggestion = analyse_biaodian(i,probs[i],gap,top_word,1.3 ,2 ,1)\n", + " \n", + " elif gap > 5:\n", + " suggestion = top_word\n", + " \n", + " if (suggestion != None and suggestion.lower() != tokens[i] and suggestion.lower() != original_tokens[i]): #修改存在并且是另外一个词\n", + " suggestions.update({i:suggestion})\n", + " mode = 2\n", + " elif suggestions.__contains__(i)==True: #这是因为之前在拼写检查时已经修改了该位置的单词\n", + " if original_tokens[i] == tokens[i]:\n", + " del suggestions[i]\n", + " mode = 1\n", + " else:\n", + " mode = 2\n", + " suggestion = suggestions[i]\n", + " else:\n", + " if original_tokens[i] != tokens[i]:\n", + " mode = 2\n", + " suggestions[i] = tokens[i]\n", + " suggestion = tokens[i]\n", + " else:\n", + " mode = 1\n", + " \n", + " print_token(original_tokens[i], suggestion, gap, mode)\n", + " print()\n", + " print(original_tokens[i],tokens[i],suggestion,mode)\n", + " avg_gap /= (len(tokens) - 2)\n", + " print()\n", + " print('平均gap:'+ str(avg_gap))\n", + " return avg_gap" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "def analyze_part_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=500):\n", + " step = 15 #用于训练加速的步长,每15个token被mask一个位置\n", + " global input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids,suggestions,original_tokens\n", + " suggestions = {}#清空全局变量\n", + " text = process_biaodian(text)\n", + " text0 = text #保存有拼写错误的文本\n", + " text = correct_spelling(text[0]) #拼写修正过得文本\n", + " print(\"********************************\")\n", + " print(text)\n", + " print(\"********************************\")\n", + " #黄金搭档token_Align放在show_abnormals里面了\n", + " input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = process_text(text[0])\n", + " \n", + " examples = convert_text_to_examples(text)\n", + " features = convert_examples_to_features(examples, tokenizer, print_info=False)\n", + " given_mask = \"[MASK]\" in features[0].tokens\n", + " if not given_mask or masked_tokens is not None:\n", + " assert len(features) == 1\n", + " features, batches = copy_and_mask_feature(features[0],step, masked_tokens=masked_tokens)\n", + " #print(len(features))\n", + "\n", + " input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) #把input_ids增加了一个维度,变成[n_features,sequence_len]\n", + " #这里的n_features实际上是句子有多少批训练\n", + "\n", + " input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样\n", + " input_ids = input_ids.to(device) \n", + " input_type_ids = input_type_ids.to(device)\n", + " \n", + " time_start=time.time()\n", + " mlm_logits= model(input_ids)\n", + " time_end=time.time()\n", + " print('time cost1',time_end-time_start,'s')\n", + " \n", + " mlm_probs = F.softmax(mlm_logits, dim=-1) \n", + " tokens = features[0].tokens #为了输出,[mask]在input_ids里面表示出来,features的token都一样\n", + " print(tokens)\n", + " if not given_mask or masked_tokens is not None:\n", + " bsz, seq_len, vocab_size = mlm_probs.size() #三个维度分别是batch_size, sequence_length, vocab_size\n", + " assert bsz == len(batches)\n", + " reduced_mlm_probs = torch.Tensor(1, len(tokens), vocab_size)\n", + " for i in batches:\n", + " pos = 
i\n", + " while pos < len(tokens):\n", + " reduced_mlm_probs[0, pos] = mlm_probs[i, pos]\n", + " pos = pos + step\n", + " mlm_probs = reduced_mlm_probs #压缩一下大小,节约不必要浪费的空间(只需要第i个batch里面[mask]位置的词汇表概率即可)\n", + " top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs) #传入的probs是二维的\n", + " if not given_mask:\n", + " avg_gap = show_abnormals(tokens,mlm_probs[0],text0[0], show_suggestions=show_suggestions)\n", + " return suggestions,original_tokens,avg_gap\n" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "metadata": {}, + "outputs": [], + "source": [ + "def analyze_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=500):\n", + " suggestions = {}\n", + " avg_gap = 0\n", + " new_part_suggestions = {}\n", + " original_tokens = ['[CLS]','[SEP]']\n", + " text = process_biaodian(text)\n", + " text0 = text #保存有拼写错误的文本\n", + " texts = split_text(text,50,40)\n", + " accumulate_length = 0\n", + " remainer = 2 #[CLS]和[SEP]\n", + " for text0 in texts:\n", + " part_suggestions,part_original_tokens,part_avg_gap = analyze_part_text(text0, masked_tokens, show_suggestions, show_firstk_probs)\n", + " for key in part_suggestions:\n", + " new_part_suggestions[key + accumulate_length] = part_suggestions[key]\n", + " tokens_length = len(part_original_tokens)\n", + " accumulate_length = accumulate_length + tokens_length - remainer\n", + " suggestions.update(new_part_suggestions)\n", + " original_tokens = original_tokens[:-1] + part_original_tokens[1:]\n", + " avg_gap = avg_gap + part_avg_gap*(tokens_length - 2)\n", + " avg_gap = avg_gap/(accumulate_length-1)\n", + " return suggestions,original_tokens,avg_gap" + ] + }, + { + "cell_type": "code", + "execution_count": 119, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/14/2019 16:02:15 - INFO - examples.extract_features - tokens: [CLS] when i was little , friday ' s night was our family game night . after supper , we would play card games of all sort in the sitting room . as the kid , i loved to watch cartoons , but no matter how many times i asked for watching them , my parents would not to let me . [SEP]\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "********************************\n", + "[\" when i was little , friday' s night was our family game night . after supper , we would play card games of all sort in the sitting room . as the kid , i loved to watch cartoons , but no matter how many times i asked for watching them , my parents would not to let me .\"]\n", + "********************************\n", + "time cost1 0.5431368350982666 s\n", + "['[CLS]', 'when', 'i', 'was', 'little', ',', 'friday', \"'\", 's', 'night', 'was', 'our', 'family', 'game', 'night', '.', 'after', 'supper', ',', 'we', 'would', 'play', 'card', 'games', 'of', 'all', 'sort', 'in', 'the', 'sitting', 'room', '.', 'as', 'the', 'kid', ',', 'i', 'loved', 'to', 'watch', 'cartoons', ',', 'but', 'no', 'matter', 'how', 'many', 'times', 'i', 'asked', 'for', 'watching', 'them', ',', 'my', 'parents', 'would', 'not', 'to', 'let', 'me', '.', '[SEP]']\n", + " 0 | [CLS] \t 2 | . 1 | the 1 | ) 1 | , 1 | \" \n", + " 97 | when \t* 97 | when 3 | since 0 | until 0 | while 0 | as \n", + " 88 | i \t* 88 | i 7 | she 2 | he 0 | we 0 | cassie \n", + " 100 | was \t*100 | was 0 | were 0 | turned 0 | got 0 | became \n", + " 6 | little \t 12 | twelve 11 | younger 10 | eight * 6 | little 5 | older \n", + " 100 | , \t*100 | , 0 | . 0 | again 0 | ... 
0 | and \n", + " 0 | friday \t 47 | valentine 14 | mother 5 | father 4 | grandma 4 | children \n", + " 100 | ' \t*100 | ' 0 | ′ 0 | ` 0 | \" 0 | - \n", + " 100 | s \t*100 | s 0 | til 0 | n 0 | d 0 | o \n", + " 38 | night \t* 38 | night 13 | dinner 7 | game 6 | eve 4 | day \n", + " 93 | was \t* 93 | was 7 | became 0 | is 0 | were 0 | , \n", + " 6 | our \t 79 | a 10 | the * 6 | our 2 | my 0 | their \n", + " 2 | family \t 68 | favorite 7 | first * 2 | family 2 | favourite 2 | only \n", + " 2 | game \t 12 | fun 9 | dinner 5 | christmas 4 | ' 3 | entertainment\n", + " 85 | night \t* 85 | night 9 | day 1 | date 1 | tonight 1 | dinner \n", + " 96 | . \t* 96 | . 3 | and 1 | ; 0 | - 0 | : \n", + " 82 | after \t* 82 | after 5 | over 5 | during 4 | at 2 | before \n", + " 7 | supper \t 38 | dinner 15 | school 9 | midnight * 7 | supper 4 | lunch \n", + " 100 | , \t*100 | , 0 | together 0 | ##time 0 | and 0 | time \n", + " 99 | we \t* 99 | we 0 | they 0 | everyone 0 | people 0 | i \n", + " 98 | would \t* 98 | would 2 | could 0 | did 0 | might 0 | helped \n", + " 86 | play \t* 86 | play 10 | have 2 | watch 0 | enjoy 0 | hold \n", + " 8 | card \t 44 | board 23 | video * 8 | card 1 | family 1 | computer \n", + " 98 | games \t* 98 | games 1 | game 0 | ##io 0 | tricks 0 | matches \n", + " 100 | of \t*100 | of 0 | and 0 | in 0 | , 0 | to \n", + " 0 | all \t 91 | some 5 | any 1 | every 1 | a 0 | the \n", + " 2 | sort \t 33 | kinds 25 | types 16 | sorts 4 | sizes 2 | ages \n", + " 99 | in \t* 99 | in 0 | around 0 | inside 0 | outside 0 | at \n", + " 82 | the \t* 82 | the 15 | our 1 | my 0 | a 0 | his \n", + " 0 | sitting \t 52 | family 19 | living 14 | dining 2 | back 1 | common \n", + " 100 | room \t*100 | room 0 | area 0 | rooms 0 | hall 0 | ##room \n", + " 100 | . \t*100 | . 0 | and 0 | ; 0 | ! 0 | ... \n", + " 43 | as \t* 43 | as 16 | like 15 | with 7 | for 2 | unlike \n", + " 0 | the \t 100 | a 0 | an 0 | another * 0 | the 0 | one \n", + " 10 | kid \t 39 | youngest 27 | child * 10 | kid 5 | baby 3 | oldest \n", + " 99 | , \t* 99 | , 0 | prodigy 0 | then 0 | now 0 | here \n", + " 98 | i \t* 98 | i 0 | he 0 | we 0 | she 0 | dad \n", + " 27 | loved \t 36 | wanted * 27 | loved 17 | used 13 | liked 2 | tried \n", + " 100 | to \t*100 | to 0 | and 0 | playing 0 | watching 0 | being \n", + " 99 | watch \t* 99 | watch 1 | see 0 | watched 0 | play 0 | watching \n", + " 1 | cartoons \t 44 | games 18 | them 14 | movies 8 | cards 3 | kids \n", + " 95 | , \t* 95 | , 4 | . 0 | - 0 | ; 0 | ... \n", + " 38 | but \t 58 | and * 38 | but 2 | so 2 | because 0 | though \n", + " 100 | no \t*100 | no 0 | little 0 | the 0 | zero 0 | without \n", + " 100 | matter \t*100 | matter 0 | to 0 | telling 0 | idea 0 | tell \n", + " 100 | how \t*100 | how 0 | what 0 | however 0 | the 0 | who \n", + " 100 | many \t*100 | many 0 | often 0 | few 0 | numerous 0 | several \n", + " 99 | times \t* 99 | times 0 | kids 0 | nights 0 | years 0 | days \n", + " 97 | i \t* 97 | i 1 | we 0 | he 0 | people 0 | she \n", + " 4 | asked \t 32 | apologized 15 | begged 13 | paid 4 | wished * 4 | asked \n", + " 25 | for \t 41 | about * 25 | for 7 | while 6 | after 3 | without \n", + " 86 | watching \t* 86 | watching 1 | to 1 | just 1 | for 1 | playing \n", + " 72 | them \t* 72 | them 9 | cartoons 4 | it 3 | one 3 | movies \n", + " 100 | , \t*100 | , 0 | - 0 | . 0 | ... 
0 | and \n", + " 99 | my \t* 99 | my 0 | his 0 | the 0 | our 0 | her \n", + " 32 | parents \t* 32 | parents 22 | mother 21 | father 11 | mom 4 | dad \n", + " 0 | would \t 33 | decided 29 | chose 8 | tried 6 | seemed 5 | knew \n", + " 0 | not \t 48 | refuse 36 | have 7 | agree 2 | promise 1 | want \n", + " 0 | to \t 58 | always 26 | have 4 | ever 3 | even 2 | really \n", + " 56 | let \t* 56 | let 9 | believe 8 | bother 4 | stop 3 | tell \n", + " 97 | me \t* 97 | me 2 | go 0 | up 0 | it 0 | on \n", + " 100 | . \t*100 | . 0 | ; 0 | ! 0 | ? 0 | ... \n", + " 0 | [SEP] \t 20 | \" 15 | but 7 | and 5 | so 3 | for \n", + "['[CLS]', 'when', 'i', 'was', 'little', ',', 'friday', \"'\", 's', 'night', 'was', 'our', 'family', 'game', 'night', '.', 'after', 'supper', ',', 'we', 'would', 'play', 'card', 'games', 'of', 'all', 'sort', 'in', 'the', 'sitting', 'room', '.', 'as', 'the', 'kid', ',', 'i', 'loved', 'to', 'watch', 'cartoons', ',', 'but', 'no', 'matter', 'how', 'many', 'times', 'i', 'asked', 'for', 'watching', 'them', ',', 'my', 'parents', 'would', 'not', 'to', 'let', 'me', '.', '[SEP]']\n", + "\n", + "*******************************************************************************************************************\n", + "1\n", + "0.0\n", + "WRB\n", + "\u001b[38;5;15m\u001b[48;5;0mwhen \u001b[0m\n", + "when when None 1\n", + "\n", + "*******************************************************************************************************************\n", + "2\n", + "0.0\n", + "NN\n", + "\u001b[38;5;15m\u001b[48;5;0mi \u001b[0m\n", + "i i None 1\n", + "\n", + "*******************************************************************************************************************\n", + "3\n", + "0.0\n", + "VBD\n", + "\u001b[38;5;15m\u001b[48;5;0mwas \u001b[0m\n", + "was was None 1\n", + "\n", + "*******************************************************************************************************************\n", + "4\n", + "0.6453734100348458\n", + "JJ\n", + "\u001b[38;5;226m\u001b[48;5;0mlittle\u001b[0m\u001b[38;5;226m\u001b[48;5;0m \u001b[0m\n", + "little little None 1\n", + "\n", + "*******************************************************************************************************************\n", + "5\n", + "0.0\n", + ",\n", + "\u001b[38;5;15m\u001b[48;5;0m, \u001b[0m\n", + ", , None 1\n", + "\n", + "*******************************************************************************************************************\n", + "6\n", + "5.662634394823419\n", + "NN\n" + ] + }, + { + "ename": "ValueError", + "evalue": "not enough values to unpack (expected 2, got 1)", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;31m#text = [\"During my last winter holiday, I went to countryside with my father to visit my grandparents. I find a big change there. The first time I went there, they were living in a small house with dogs, ducks, and another animals. Last winter when I went here again, they had a big separate house to raise dozens of chicken. They also had a small pond which they raised fish. My grandpa said last summer they earned quite a lot by sell the fish. I felt happily that their life had improved. 
At the end of our trip,I told my father that I planned to return for every two years, but he agreed.\"]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mtime_start\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 22\u001b[0;31m \u001b[0manalyze_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtext\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_firstk_probs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m500\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 23\u001b[0m \u001b[0mtime_end\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtime\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'time cost'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtime_end\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mtime_start\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m's'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36manalyze_text\u001b[0;34m(text, masked_tokens, show_suggestions, show_firstk_probs)\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0mremainer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m2\u001b[0m \u001b[0;31m#[CLS]和[SEP]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mtext0\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mtexts\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 12\u001b[0;31m \u001b[0mpart_suggestions\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mpart_original_tokens\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mpart_avg_gap\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0manalyze_part_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtext0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmasked_tokens\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_suggestions\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_firstk_probs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 13\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mkey\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mpart_suggestions\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0mnew_part_suggestions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0maccumulate_length\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpart_suggestions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mkey\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36manalyze_part_text\u001b[0;34m(text, masked_tokens, show_suggestions, show_firstk_probs)\u001b[0m\n\u001b[1;32m 48\u001b[0m \u001b[0mtop_pairs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mshow_lm_probs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtokens\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmlm_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfirstk\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshow_firstk_probs\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m#传入的probs是二维的\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mgiven_mask\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 50\u001b[0;31m \u001b[0mavg_gap\u001b[0m 
\u001b[0;34m=\u001b[0m \u001b[0mshow_abnormals\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtokens\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmlm_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtext0\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mshow_suggestions\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mshow_suggestions\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 51\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0msuggestions\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0moriginal_tokens\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mavg_gap\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mshow_abnormals\u001b[0;34m(tokens, probs, text, show_suggestions)\u001b[0m\n\u001b[1;32m 75\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 76\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0;34m\"NN\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mtag\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m#名词\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 77\u001b[0;31m \u001b[0msuggestion\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0manalyse_N\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mprobs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mgap\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtop_word\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m4\u001b[0m \u001b[0;34m,\u001b[0m\u001b[0;36m10\u001b[0m \u001b[0;34m,\u001b[0m\u001b[0;36m2.2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 78\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 79\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0;34m\"CC\"\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mtag\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m#连词\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36manalyse_N\u001b[0;34m(index, prob, gap, top_word, threshold, threshold2, threshold3)\u001b[0m\n\u001b[1;32m 47\u001b[0m \u001b[0mlist_IN\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;34m[\u001b[0m\u001b[0;34m\"of\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'to'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"at\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"in\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"on\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"by\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"for\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"from\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"with\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"about\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"against\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"along\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"among\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"around\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"as\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"before\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"behind\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"below\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"beside\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"between\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"during\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"besides\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"into\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"near\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"over\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"through\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"under\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"without\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"after\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"above\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 48\u001b[0m \u001b[0mlist_DT_IN\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlist_DT\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mlist_IN\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 49\u001b[0;31m \u001b[0msuggestion\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mneed_DT_IN\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0m_\u001b[0m\u001b[0;34m=\u001b[0m \u001b[0mgive_suggestion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minput_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0minput_type_ids\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mid_in_sen\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlist_DT_IN\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 50\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mneed_DT_IN\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;31m#不需要冠词或介词\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mgap\u001b[0m \u001b[0;34m<\u001b[0m \u001b[0mthreshold2\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;31m#没有可以替换的词,而且原本该位置的词就勉强符合要求\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mgive_suggestion\u001b[0;34m(input_ids_, input_type_ids_, id_in_sen, alternative_word, threshold)\u001b[0m\n\u001b[1;32m 31\u001b[0m \u001b[0mT_input_type_ids\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mT_input_type_ids\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mto\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdevice\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 33\u001b[0;31m \u001b[0mmlm_logits\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mT_input_ids\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mT_input_type_ids\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 34\u001b[0m \u001b[0mmlm_probs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mF\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msoftmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmlm_logits\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdim\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[0mreduced_mlm_probs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmlm_probs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mid_in_sen\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: not enough values to unpack (expected 2, got 1)" + ] + } + ], + "source": [ + "import time\n", + "# text = [\"Who was Jim Henson? Jim Henson _ a puppeteer.\"]\n", + "#text = [\"Last week I went to the theater. There are many person . Luckily , I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "#text = [\"He is my friend.\"]\n", + "text = [\"When I was little, Friday's night was our family game night. After supper, we would play card games of all sort in the sitting room. As the kid, I loved to watch cartoons,but no matter how many times I asked for watching them, my parents would not to let me.They would say to us that playing card games would help my brain. Still I unwilling to play the games for them sometimes. \"]\n", + "\n", + "#text = [\"Last week I went to the theater. I had very good seat. The plays was very interesting. However, I didn't enjoy it. A young man and a young woman were sitting behind me. They were talk loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angry. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'\"]\n", + "# text = [\"After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. At present, all the above measures have been implemented.\"]\n", + "# text = [\"Early critics of Emily Dickinson's poetry mistook for simplemindedness the surface of artlessness that in fact she constructed with such innocence.\"]\n", + "#text = [\"The journey was long and tired. We left London at five o'clock in the evening and spend eight hours in the train. We had been travelled for 3 hours after someone appeared selling food and drinks. It was darkness all the time we were crossing Wales, but we could see nothing through the windows. 
When we finally arrived Holyhead nearly , everyone was sleeping. As soon as the train stopped, everybody come to life, grabbing their suitcases and rushing onto the platform.\"]\n", + "#text = [\"When I was little, Friday's night was our family game night. After supper, we would play card games of all sort in the sitting room. As the kid, I loved to watch cartoons,but no matter how many times I asked to watching them, my parents would not to let me. They would say to us that playing card games would help my brain. Still I unwilling to play the games for them sometimes. I didn't realize how right my parents are until I entered high school. The games my parents taught me where I was a child turned out to be very useful later in my life.\"]\n", + "#text = [\"Mr. and Mrs.Zhang all work in our school. They live far from the school, and it takes them about a hour and a half to go to work every day. In their spare time, they are interesting in planting vegetables in their garden, that is on the rooftop of their house. They often get up earlier and water the vegetables together. They have also bought for some gardening tools.beside, they often get some useful informations from the internet. When summer came, they will invite their students pick the vegetables!\"]\n", + "#text = ['The question is more easy than that.']\n", + "#text = [\"Last week I go to the zoo. I had a very good seat. The play was very interesting.\"]\n", + "#text =[\"Last week I went to the theater. I had very good seat. The play was very interesting.But I didn't enjoy it. A young man and a young woman were sitting behind me.They were talking loudly. I got very angry.\"]#因为外面有中括号,所以是二维的\n", + "#text = ['It was Monday morning, and the writeing class had just begin.We were tiring. Everyone was silent, wait to see who would be called upon to read his and her paragraph aloud. Some of us were confidont and eagerly take part in the class activity, others were nervous and anxious. I had done myself homework but I was shy. I was afraid that to speak in front of a larger group of people. At that moment, I remembered that my father once said, \"The classroom is a place for learning and that include leaning from textbooks, and mistake as well.\" Immediate, I raised my hand.']\n", + "#text = ['During my last winter holiday, I went to countryside with my father to visit my grandparents. I find a big change there. The first time I went there, they were living in a small house with dogs, ducks, and another animals. Last winter when I went here again, they had a big separate house to raise dozens of chicken. They also had a small pond which they raised fish. My grandpa said last summer they earned quite a lot by sell the fish. I felt happily that their life had improved. At the end of our trip,I told my father that I planned to return for every two years, but he agreed.']\n", + "#text = [\"what is justice ? what is good ? what kind of life is a happy life ? how can a justice ' s life benefit human beings ? is it certain that a justice ' s life must lead to happiness ? these problems have already been questioned thousands and hundreds years . they will continue to be questioned . this dissertation tries to discuss the connection between the city - state and the citizen . in the first and the second part of the dissertation the writer tries to make it clear what city - state , citizen and justice mean in the republic . plato ' s idea theory is explained in the third part . 
and how can his idea theory apply to the education system of the city - state and the happiness of the citizens . the fourth part reviewed old education system and educators , which includes poets and wise men . the poets are criticized for their negative effects to the youth . the fifth part is the education lawmaking of the ideal city - state , together with education means and education principle . the sixth and the seventh parts explain how can the city - state educate qualified soldiers and philosophers . they receive the same nation educate at first which is poetry educatdion and athletics education . some excellent soldiers go into higher category by the selection . they will receive philosopher ' s education , studying some specified subjects . then it makes a conclusion that the education is the only means to attain an ideal city - state .\"]\n", + "#text = ['The head of state immunity principle is an ancient principle of customary international law. Diplomatic privileges and immunity, monarchy personal exemption, and state immunity theory has a close connection. By analyzing the interrelation of the three concepts ,them are closely related.and has important effects on the head of state immunity principle.The head of state immunity in criminal is also a widespread international recognition. However, from the beginning of the last century, with the development of international criminal law, the principle has been impacted by the international criminal law. Because the punishments by international criminal institutions, and the individual criminal responsibility shall be investigated for. And the head of state is particular, the implementation of the international crimes is different with general international crime.So,it’s cause some controversial issue. In the part two,according to discusses the main cases about the head of state.After the world war II.We can known that although the practices of international criminal justice institution repeatedly emerge the judgment of the head of state.but,the principls as such as \"official identity independence\" and \"individual criminal responsibility\" emphasize the criminal responsibility of the head of state.Seems the criminal jurisdiction of heads of state immunity can no longer competed the criminal responsibility. But in fact, there still not an common answer to solve the debate. The part three summarizes the reasons of the conflict and description the heads of state immunity is necessity. In the new international situation it’s necessary to reserve the head of state immunity in the criminal rationally. And find some ways to solved this contradiction from the standpoint of draft norm of international law. For example the international community should be improving the international force law norms.']\n", + "#text = [\"During my last winter holiday, I went to countryside with my father to visit my grandparents. I find a big change there. The first time I went there, they were living in a small house with dogs, ducks, and another animals. Last winter when I went here again, they had a big separate house to raise dozens of chicken. They also had a small pond which they raised fish. My grandpa said last summer they earned quite a lot by sell the fish. I felt happily that their life had improved. 
At the end of our trip,I told my father that I planned to return for every two years, but he agreed.\"]\n", + "time_start=time.time()\n", + "analyze_text(text, show_firstk_probs=500)\n", + "time_end=time.time()\n", + "print('time cost',time_end-time_start,'s')" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": {}, + "outputs": [], + "source": [ + "'''\n", + " 功能:对suggestions进行修改,由于某处位置改变造成suggestions后面的错误位置都相应移动\n", + " 输入:\n", + " index:开始移动的位置\n", + " direction:移动的方向,1表示向右边移,-1表示向左边移\n", + "'''\n", + "def modify_suggestions(index,direction):\n", + " global suggestions\n", + " new_suggestions = {};\n", + " if direction == 0:\n", + " pass\n", + " elif direction == 1:\n", + " for key in suggestions:\n", + " if key < index:\n", + " new_suggestions.update({key:suggestions[key]})\n", + " else:\n", + " new_suggestions.update({key+1:suggestions[key]})\n", + " elif direction == -1:\n", + " for key in suggestions:\n", + " if key < index:\n", + " new_suggestions.update({key:suggestions[key]})\n", + " else:\n", + " new_suggestions.update({key-1:suggestions[key]}) \n", + " suggestions = new_suggestions\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1717, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n 功能:\\n 修改文本,tokens,suggestions\\n 输入:\\n index:修改的位置\\n text:被修改前的原文\\n 输出:\\n [text]:修改后的文本\\n new_tokens:修改后的新tokens\\n suggestions:修改后新的建议字典\\n'" + ] + }, + "execution_count": 1717, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#print(suggestions)\n", + "def display_suggestion():\n", + " print(\"**********************************display_suggestions********************************************************\")\n", + " print(\"| {:50} : {}\".format(\"suggestion\",\"position in text\"))\n", + " print(\"---------------------------------------------------------------------------------------\")\n", + " for key in suggestions:\n", + " print(\"| {:<50} : {}\".format(suggestions[key] ,key))\n", + " print(\"*************************************************************************************************************\")\n", + "#display_suggestion()\n", + "\n", + "'''\n", + " 功能:\n", + " 修改文本,tokens,suggestions\n", + " 输入:\n", + " index:修改的位置\n", + " text:被修改前的原文\n", + " 输出:\n", + " [text]:修改后的文本\n", + " new_tokens:修改后的新tokens\n", + " suggestions:修改后新的建议字典\n", + "'''\n", + "def modify_text(index,text): #修改文本,tokens,以及suggestions\n", + " global suggestions,original_tokens\n", + " tokens = original_tokens\n", + " new_text = \"\"\n", + " suggestion = suggestions[index]\n", + " del(suggestions[index])\n", + " suggestion_tokens = suggestion.split(\" \")\n", + " #print(suggestion_tokens)\n", + " if '去掉前面' == suggestion_tokens[0]:\n", + " del tokens[index - 1]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " modify_suggestions(index,-1)\n", + " index = index - 1\n", + " elif '去掉后面' == suggestion_tokens[0]:\n", + " del tokens[index + 1]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " modify_suggestions(index+2,-1)\n", + " elif '去掉' == suggestion_tokens[0]:\n", + " del tokens[index]\n", + " del suggestion_tokens[0]\n", + " del suggestion_tokens[0]\n", + " modify_suggestions(index+1,-1)\n", + " if '原位置改成' in suggestion_tokens:\n", + " del suggestion_tokens[0]\n", + " \n", + " \n", + " len_suggest = len(suggestion_tokens)\n", + " if len_suggest == 1:\n", + " tokens[index] = suggestion_tokens[0]\n", + " elif len_suggest == 2:\n", + " tokens.insert(index,suggestion_tokens[0])\n", + " 
tokens[index + 1] = suggestion_tokens[1]\n", + " modify_suggestions(index+1,1)\n", + " final_len = len(tokens)\n", + "\n", + " for i in range(1,len(tokens)-1):\n", + " word = tokens[i]\n", + " if word[0:2] == \"##\":\n", + " new_text = new_text + word[2:]\n", + " else:\n", + " new_text = new_text + ' ' + word\n", + " \n", + " original_tokens = tokens\n", + " return [text],tokens,suggestions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 1628, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[NbConvertApp] Converting notebook likunlin_final.ipynb to python\n", + "[NbConvertApp] Writing 79979 bytes to likunlin_final.py\n" + ] + } + ], + "source": [ + "#变成py文件\n", + "try:\n", + " !jupyter nbconvert --to python likunlin_final.ipynb\n", + "except:\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/likunlin_final.py b/likunlin_final.py new file mode 100644 index 00000000000000..be3b4ff5453b4f --- /dev/null +++ b/likunlin_final.py @@ -0,0 +1,1960 @@ + +# coding: utf-8 + +# In[242]: + + +import os +import json +import nltk +import numpy as np +import math +import matplotlib +import matplotlib.pyplot as plt +from pylab import rcParams + +import torch +import torch.nn.functional as F +from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig +from examples.extract_features import * + + +# In[274]: +class Args: + def __init__(self): + pass + +args = Args() +args.no_cuda = False #不用GPU + +CONFIG_NAME = 'bert_config.json' +BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased' +config_file = os.path.join(BERT_DIR, CONFIG_NAME) +config = BertConfig.from_json_file(config_file) + +try: + tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'))#do_lower_case:在标记化时将文本转换为小写。默认= True +except: + tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') +#tokenizer.tokenize = nltk.word_tokenize + +model = BertForMaskedLM.from_pretrained(BERT_DIR) +device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") +_ = model.to(device) +_ = model.eval() + +input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = [],[],[],[],[],[] +suggestions = {} #外部变量,需要传到前端 +original_tokens = [] #外部变量,需要传到前端 + + +# BertForPreTraining: +# Outputs: +# if `masked_lm_labels` and `next_sentence_label` are not `None`: +# Outputs the total_loss which is the sum of the masked language modeling loss and the next +# sentence classification loss. +# if `masked_lm_labels` or `next_sentence_label` is `None`: +# Outputs a tuple comprising +# - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and +# - the next sentence classification logits of shape [batch_size, 2]. + +# from_pretrained: +# Instantiate a BertPreTrainedModel from a pre-trained model file or a pytorch state dict. 
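+# (A shape sketch of how the masked-LM head is queried throughout this file,
+#  e.g. inside give_suggestion() further down; the names mirror that function:
+#      mlm_logits = model(T_input_ids)                  # [1, seq_len, vocab_size]
+#      mlm_probs  = F.softmax(mlm_logits, dim=-1)       # probabilities over the vocabulary
+#      reduced    = mlm_probs[0][id_in_sen]             # distribution for the masked slot
+#      top_word   = tokenizer.ids_to_tokens[reduced.argmax().item()]
+#  The "gap" scores used below are differences of log-probabilities taken from
+#  this distribution.)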
+# Download and cache the pre-trained model file if needed. + +# In[254]: + + +import re +def convert_text_to_examples(text): + '''功能: + 把输入的文本变成一个实例,一个实例中包含text_a,text_b(text_b用于是否为上下句的任务,该任务不使用此功能) + 输入: + text:一个列表结构,列表中包含原始文本字符串,由于仅完成mlm任务,所以text列表中仅包含一个字符串,就是待检查的字符串 + 输出: + example:实例,其中包含: + unique_id:此任务仅用到0 + text_a:text列表内的字符串 + text_b:此任务下该变量为None + ''' + examples = [] + unique_id = 0 + if True: + for line in text: + line = line.strip() + text_a = None + text_b = None + m = re.match(r"^(.*) \|\|\| (.*)$", line) #想要匹配这样的字符串'You are my sunshine. ||| I love you.' + + if m is None: + text_a = line + else: + text_a = m.group(1) #匹配的第一句,比如You are my sunshine,my only sunshine. + text_b = m.group(2) #匹配的第二句,比如I love you. + + examples.append( + InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b)) + unique_id += 1 + return examples +#print(convert_text_to_examples(['I love you. The cat is so cute.'])[0].text_a) + +def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False): + '''功能: + 把实例变成一个特征列表 + 输入: + examples:实例,convert_text_to_examples()函数的输出 + tokenizer:BERT的tokenizer,用于将文本进行各种处理,它可以把一个text转变成tokens,把tokens变成每个token在词典中的编号以及逆运算 + append_special_tokens:是否允许在生成的tokens中加入特殊符号,也就是[CLS]、[MASK]和[SEP],默认为True + replace_mask:不明 + print_info:不明 + 输出: + features:每一个feature包含: + unique_id:编号,目前实现的功能features里面仅有一个feature + tokens=tokens,tokens:是形如['i','love','you','.']的一个列表 + input_ids=input_ids:字符串中的每个单词在词典中的index序列 + input_mask=input_mask:一堆1 + input_type_ids=input_type_ids)):对text_a,text_b的区分,用于上下句任务,对于本任务,该参数为一个列表,其中包含token长度个的0 + ''' + features = [] + for (ex_index, example) in enumerate(examples): + tokens_a = tokenizer.tokenize(example.text_a) #tokenize的作用是把"i love you."变成['i','love','you','.'] + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + tokens = [] + input_type_ids = [] #segment embedding + if append_special_tokens: #输入参数中默认为true + tokens.append("[CLS]") + input_type_ids.append(0) + for token in tokens_a: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(0) + if append_special_tokens: + tokens.append("[SEP]") + input_type_ids.append(0) + + if tokens_b: + for token in tokens_b: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(1) + if append_special_tokens: + tokens.append("[SEP]") + input_type_ids.append(1) + input_ids = tokenizer.convert_tokens_to_ids(tokens) #把原来句子中的词语编成在字典中的编号 + input_mask = [1] * len(input_ids) + + if ex_index < 5: +# logger.info("*** Example ***") +# logger.info("unique_id: %s" % (example.unique_id)) + logger.info("tokens: %s" % " ".join([str(x) for x in tokens])) +# logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) +# logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) +# logger.info( +# "input_type_ids: %s" % " ".join([str(x) for x in input_type_ids])) + + features.append( + InputFeatures( + unique_id=example.unique_id,#编号,目前实现的功能features里面仅有一个feature + tokens=tokens,#形如['i','love','you','.']的一个列表 + input_ids=input_ids,#字符串中的每个单词在词典中的index序列 + input_mask=input_mask, #一堆1 + input_type_ids=input_type_ids)) #第0类和第1类,对text_a,text_b的区分,本代码中全都是零 + return features + +def copy_and_mask_feature(feature, step, masked_tokens=None): + ''' + 功能: + 输入feature生成训练的批次数以及mask好的训练素材 + 输入: + feature:convert_examples_to_features函数的输出 + step:两个[mask]位置的步长 + masked_tokens:默认为None,在程序中没有使用 + ''' + import copy + tokens = 
feature.tokens + len_token = len(tokens) + if len_token 0 + masked_feature_copies = [] + for i in batches: #用[mask]依次掩盖每一个位置 + feature_copy = copy.deepcopy(feature) + masked_pos = i + while masked_pos < len_token: + feature_copy.input_ids[masked_pos] = tokenizer.vocab["[MASK]"] + masked_pos = masked_pos + step + masked_feature_copies.append(feature_copy) + return masked_feature_copies, batches + +#masked_feature_copies, batches = copy_and_mask_feature(features[0],3) +#print(masked_feature_copies[0].input_ids) #结果[101, 1045, 2293, 103, 102] +#print(batches) #结果是一个range(0,5) + + +# In[7]: + + +analyzed_cache = {} +from pattern.en import conjugate, lemma, lexeme,PRESENT,SG +#print (lemma('gave')) +#print (lexeme('production')) +#print (conjugate(verb='give',tense=PRESENT,number=SG)) +def process_text(text): + ''' + 功能: + 处理输入文本,将文本按句子分成若干token,得出原来text中index位置的单词在x句子的y位置,还得出各个句子类别码 + 输入: + text:文本字符串,注意区别 + 输出: + input_ids_sen:二维列表,第一维列表的元素是每个句子的input_ids列表 + input_type_ids_sen:二维列表,第一维列表的元素是每个句子的input_type_ids列表 + in_sentence:通过这个二维数组可以很方便的通过在完整text中的下标找到这个下标所在的句子和在句子中的下标 + sentences:字符串列表,列表中每一个元素是一个句子字符串 + entire_ids:整个text的input_ids + entire_type_ids:整个text的input_type_ids + ''' + token =[] + entire_type_ids = [] + token0 = tokenizer.tokenize(text) + token.append('[CLS]') + entire_type_ids.append(0) + for i in token0: + token.append(i) + entire_type_ids.append(0) + token.append('[SEP]') + entire_type_ids.append(0) + + entire_ids = tokenizer.convert_tokens_to_ids(token) + in_sentence = [[0,0]] + sentence_n = 0 + index = 1 + for i in range(1,len(token)-1): + in_sentence.append([sentence_n,index]) #每个token中的词在所在句中的位置表示出来,以及该位置在哪一句中 + index = index + 1 #比如,位置i这个词在第sentence句的index位置上 + if token[i] == '.': + sentence_n = sentence_n + 1 + index = 1 + sentences = text.split(".") + + sen_token = [] + input_ids_sen = [] + input_type_ids_sen = [] + for i,sentence in enumerate(sentences): + sentence = sentence + '.' + sentences[i] = sentences[i] + '.' 
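+        # Re-tokenize each sentence on its own and wrap it in [CLS] ... [SEP], so that
+        # later checks (e.g. give_suggestion) can score a word both inside its single
+        # sentence (input_ids_sen) and inside the whole text (entire_ids), using the
+        # in_sentence mapping built above to translate between the two index spaces.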
+ token = [] + input_type_ids = [] + tokens = tokenizer.tokenize(sentence) + token.append('[CLS]') + input_type_ids.append(0) + for i in tokens: + token.append(i) + input_type_ids.append(0) + token.append('[SEP]') + input_type_ids.append(0) + input_ids_sen.append(tokenizer.convert_tokens_to_ids(token)) + input_type_ids_sen.append(input_type_ids) + return input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids + + +# In[8]: + + +def get_word(index): + ''' + 输入: + index:在完整text中的位置 + 输出 + word:该位置上的单词 + ''' + word_id = entire_ids[index] + word = tokenizer.ids_to_tokens[word_id] + return word + + +# In[1559]: + + +import copy +import nltk +from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE + +def give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold): + ''' + 功能: + 给出指定文本指定位置的推荐用词 + 输入: + input_ids_:要分析的文本的input_ids + input_type_ids_:要分析的文本的的input_type_ids + id_in_sen:要分析的文本中[MASK]的位置下标,也就是需要给出建议用词的位置 + alternative_word:推荐的备选词范围 + threshold:阈值 + 输出: + suggestion:推荐 + need:推荐的是否是备选词中的词 + suggestion_prob:推荐词填在id_in_sen位置的概率 + top_of_alternative:备选词中最值得推荐的词 + ''' + input_ids = copy.deepcopy(input_ids_) + input_type_ids = copy.deepcopy(input_type_ids_) + word0 = input_ids[id_in_sen] + word0 = tokenizer.ids_to_tokens[word0] + list_word_id = [] + + input_ids[id_in_sen] = tokenizer.vocab["[MASK]"] + T_input_ids = torch.tensor([input_ids], dtype=torch.long) #把input_ids增加了一个维度 + T_input_type_ids = torch.tensor([input_type_ids], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样 + T_input_ids = T_input_ids.to(device) #拿去GPU + T_input_type_ids = T_input_type_ids.to(device) + + mlm_logits = model(T_input_ids) + mlm_probs = F.softmax(mlm_logits, dim=-1) + reduced_mlm_probs = mlm_probs[0][id_in_sen] + + top_ind = reduced_mlm_probs.argmax().item() + top_prob = reduced_mlm_probs.max().item() + + list_word = [] + + top_of_alternative = None + if len(alternative_word)>0: + list_word_prob = {} + for word in alternative_word: + try: + list_word_id.append(tokenizer.vocab[word]) + list_word.append(word) + except KeyError: + pass + + for word,word_id in zip(list_word,list_word_id): + list_word_prob.update({word:float(reduced_mlm_probs[word_id].data)}) + prob_ord = sorted(list_word_prob.items(),key = lambda x:x[1],reverse = True) + + top_prob_word = prob_ord[0][1] + top_of_alternative = prob_ord[0][0] + gap = math.log(top_prob) - math.log(top_prob_word) + + if gap < threshold: + suggestion = prob_ord[0][0] + suggestion_prob = prob_ord[0][1] + need = 1 + else: + suggestion = tokenizer.ids_to_tokens[top_ind] + suggestion_prob = top_prob + need = 0 + #print("gap = " + str(gap)) + #print(prob_ord) + else: + suggestion = tokenizer.ids_to_tokens[top_ind] + suggestion_prob = top_prob + need = 0 + + return suggestion,need,suggestion_prob,top_of_alternative + +#返回变量5 +#suggestion -> 最值得推荐的词 +#need -> 是否需要可选词中的一个 +#suggestion_prob ->最值得推荐的词的概率 +#top_of_alternative -> 可选词中最值得推荐的 +#suggestion,need,suggestion_prob,top_of_alternative = give_suggestion(input_ids_,input_type_ids_,id_in_sen,alternative_word,threshold) + + +# In[1473]: + + +from spacy.lemmatizer import Lemmatizer +from spacy.lang.en import LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES +from pattern.en import comparative, superlative +from pattern.en import suggest +from nltk.stem.lancaster import LancasterStemmer +from nltk.stem.porter import PorterStemmer +from nltk.stem import SnowballStemmer +import enchant +d = enchant.Dict("en_US") + + +# In[1474]: + + 
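+# A minimal usage sketch for give_suggestion() (illustrative only: the sentence,
+# the masked position and the threshold below are hypothetical examples, not
+# values taken from the original notebook).
+def _demo_give_suggestion():
+    example = convert_text_to_examples(["He go to school every day ."])[0]   # hypothetical input
+    feature = convert_examples_to_features([example], tokenizer)[0]
+    id_in_sen = 2                     # token position of "go" ([CLS] is position 0)
+    candidates = lexeme("go")         # inflections of the same verb: go, goes, went, ...
+    suggestion, need, prob, top_alt = give_suggestion(
+        feature.input_ids, feature.input_type_ids, id_in_sen, candidates, 2)
+    # need == 1 means one of the candidates is close enough in log-probability to
+    # the model's overall top prediction to be returned as the correction; for this
+    # sentence the pick would typically be "goes" or "went".
+    return suggestion, need, prob, top_alt
+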
+stemmers=[] +stemmers.append(LancasterStemmer()) +stemmers.append(SnowballStemmer("english")) +stemmers.append(PorterStemmer()) +lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES) +def word_convert(word,new_word,Stemmer): + ''' + 功能: + 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically + 输入: + word:需要变形的词 + new_word:猜想的变形 + 输出: + suggest_word:推荐的正确变形 + ''' + suggest_word = None + word_stem = Stemmer().stem(word) + suggest_ = new_word + + suggest_list = suggest(suggest_) + + if len(word) 0.95):# or word_[1] > 0.95 : + suggest_word = word_[0] + break + if word_[1] < 0.001: + break + stem_list = [] + for stemmer in stemmers: + suggest_stem = stemmer.stem(word_[0]) + if flag == 1 and suggest_stem[:-1] in word_stem and word_stem[:3] in suggest_stem[:3]: #一般是去后缀 + suggest_word = word_[0] + break + elif flag == 0 and word_stem in suggest_stem and word_[0][-1:] in suggest_[-1:]: #一般是加后缀,后缀一定要一样 + suggest_word = word_[0] + break + + if suggest_word != None: + break + return suggest_word + + +# In[1475]: + + +stemmers=[] +stemmers.append(LancasterStemmer()) +stemmers.append(SnowballStemmer("english")) +stemmers.append(PorterStemmer()) +lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES) +def word_convert(word,new_word,Stemmer): + ''' + 说明; + 与上面的区别是使用的拼写改错算法不同,上面那个平均速度慢,但更符合我的要求,这个平均速度更快 + 功能: + 根据提供的word和可能的变形new_word,得到正确的变形,例如给出basic,basicly得到basically + 输入: + word:需要变形的词 + new_word:猜想的变形 + Stemmer:词根提取器 + 输出: + suggest_word:推荐的正确变形 + ''' + if d.check(new_word)==True: #如果发现new_word拼写正确,则直接返回 + return new_word + else: + suggest_word = None + word_stem = Stemmer().stem(word) + suggest_ = new_word + suggest_list = d.suggest(suggest_) #可能的正确单词列表 + + if len(word)death,success->succeed无能为力''' + + +# In[1477]: + + + +def adj_to_adv(word): + suggest_word = None + if(word == "good"): + return "well" + else: + suggest_ = word + 'ly' + suggest_word = word_convert(word,suggest_,PorterStemmer) + return suggest_word +#如果形容词副词同形,那么他会返回none,但是不影响计算,因为形容词副词同形啊 + + +def adv_to_adj(word): + suggest_word = None + if(word == "well"): + return "good" + elif word[-2:] == 'ly': + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + return suggest_word + + + +# In[1550]: + + +def adj_to_anything(word):#形容词变成其他词性 + suggest_word = None + suggest_list = [] + if word[-1:] == 'y': #举例 healthy->health + suggest_ = word[:-1] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ful':#举例 successful->success + suggest_ = word[:-3] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ive': #举例 active -> act + suggest_ = word[:-4] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-2:] == 'ed': #举例 interested->interest->interesting + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + suggest_ = suggest_ + 'ing' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + + elif word[-3:] == 'ing':#举例 interesting->interest->interested + suggest_ = word[:-3] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + suggest_ = suggest_ + 'ed' + suggest_word = word_convert(word,suggest_,PorterStemmer) 
+ if suggest_word != None: + suggest_list.append(suggest_word) + + elif word[-4:] == 'less': #举例 careless -> care + suggest_ = word[:-4] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-2:] == 'ly': #举例: friendly -> friend , lovely -> love + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + + elif word[-1:] == 't': #举例 different -> different + suggest_ = word[:-1] + suggest_ = suggest_ + 'ce' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ous': #举例 dangerous -> danger + suggest_ = word[:-3] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-2:] == 'al': #举例 original -> origin + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-4:] == 'able': + suggest_ = word[:-4] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-2:] == 'en': #举例 woolen -> wool + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-2:] == 'ic': + suggest_ = word + 'al' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + suggest_ = word[:-2] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ish': + suggest_ = word[:-3] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word == None: + suggest_ = word[:-3] + suggest_ = suggest_ + 'and' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ese': + suggest_ = word[:-3] + suggest_ = suggest_ + 'a' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-3:] == 'ian': + suggest_ = word[:-1] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word == None: + suggest_ = word[:-3] + suggest_ = suggest_ + 'y' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + if suggest_word == None: + HouZhui_list = ['ment','ness','tion','ture','sion','ty','y','tive','sive'] + for HouZhui in HouZhui_list: + suggest_ = word + HouZhui + new_word = word_convert(word,suggest_,PorterStemmer) + if new_word != None: + suggest_word = new_word + suggest_list.append(suggest_word) + suggest_list = list(set(suggest_list)) + return suggest_list + + + + +# In[1551]: + + +def N_to_anything(word):#名词变成其他词性 + suggest_list = [] + list_HouZhui = ['y','ful','tive','sive','ed','ing','less','ly','ous','al','able','en','tic','ish','ance','er','or'] + list_QianZhui = ['a'] + if word[-4:] in ['ment','ness','tion','ture','sion','tive','sive']: + suggest_ = word[:-4] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + else: + for HouZhui in list_HouZhui: + suggest_ = word + HouZhui + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + 
suggest_list.append(suggest_word) + for QianZhui in list_QianZhui: + suggest_ = QianZhui + word + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + if word[-2:] == 'ce': + suggest_ = word[:-2] + suggest_ = suggest_ + 't' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + elif word[-4:] == 'land': + suggest_ = word[:-4] + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word == None: + suggest_ = suggest_ + 'lish' + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + #print(suggest_list) + suggest_list = list(set(suggest_list)) + return suggest_list + + +# In[1552]: + + +def V_to_anything(word):#动词变成其他词性 + suggest_word = None + suggest_list = [] + + HouZhui_list = ['ful','tive','sive','ed','less','ly','ous','al','able','en','tic','ish','ance','tion','sion','ment','er','or','ee'] + for HouZhui in HouZhui_list: + suggest_ = word + HouZhui + suggest_word = word_convert(word,suggest_,PorterStemmer) + if suggest_word != None: + suggest_list.append(suggest_word) + suggest_list = list(set(suggest_list)) + return suggest_list + + +# In[1553]: + + +''' + 功能: + 生成形容词,副词关联词表 + 输入: + word:形容词/副词 + 输出: + list_word:为没有添加词的其他形式,包括三音节以下词的比较级最高级 + list_word2:为三音节及以上的词的比较级最高级,如果输入形容词比较级最高级没有more/most,该列表为空 + 说明: + 由于三音节形容词/副词的比较级,最高级为more/most+原形容词/副词,所以特别把形容词/副词和其他词性变形区分出来 +''' + +def build_like_word_adj(word): #创建类似形容词列表 + list_word = [] + list_word2 = [] #把比较级最高级带more的放在这里 + lemmas = lemmatizer(word, u'adj') + #print(lemmas) + for i in lemmas: + list_word.append(i) + word_er = comparative(i) + if "more" in word_er: #把比较级带more,most的词放在另一个列表list_word2 + list_word2.append(word_er) + else: + list_word.append(word_er) + word_est = superlative(i) + if "most" in word_est: + list_word2.append(word_est) + else: + list_word.append(word_est) + word_adv = adj_to_adv(i) + if word_adv != None: + list_word.append(word_adv) + list_N = adj_to_anything(word) + for N in list_N: + list_word.append(N) + + list_word = list(set(list_word)) + return list_word,list_word2 + +def build_like_word_adv(word): #创建类似形容词列表 + list_word = [] + list_word2 = [] + list_special = ['however','seldom','often','never','otherwise'] + if word in list_special: + list_word = [word] + list_word2 = [] + else: + lemmas = lemmatizer(word, u'adj') + #print(lemmas) + for i in lemmas: + list_word.append(i) + word_er = comparative(i) + if "more" in word_er: + list_word2.append(word_er) + else: + list_word.append(word_er) + word_est = superlative(i) + if "most" in word_est: + list_word2.append(word_est) + else: + list_word.append(word_est) + word_adv = adv_to_adj(i) + if word_adv != None: + list_word.append(word_adv) + list_word = list(set(list_word)) + return list_word,list_word2 + + +# In[1554]: + + +''' + 功能: + 根据检查的位置整理出放入BERT模型的input_ids,input_type_ids以及检查位置在input_ids中的下标位置 + pre_training_input_in_sentence得到检查位置所在句子的信息 + pre_training_input_entire得到检查位置在完整text中的信息 + 输入: + index:在完整text中的位置 + 输出: + word:该下标下的单词 + input_ids:tokens的对应字典id列表 + input_type_ids:零列表 + id_in_sen:检查位置在句子中的下标(pre_training_input_in_sentence的返回) + index:检查位置在完整text中的下标,其实就是输入的下标 +''' +def pre_training_input_in_sentence(index): + sentence_id = in_sentence[index][0] + id_in_sen = in_sentence[index][1] + word = input_ids_sen[sentence_id][id_in_sen] + word = tokenizer.ids_to_tokens[word] + input_ids = copy.deepcopy(input_ids_sen[sentence_id]) + 
input_type_ids = copy.deepcopy(input_type_ids_sen[sentence_id]) + + return word,input_ids,input_type_ids,id_in_sen + +def pre_training_input_entire(index): + word = entire_ids[index] + word = tokenizer.ids_to_tokens[word] + input_ids = copy.deepcopy(entire_ids) + input_type_ids = copy.deepcopy(entire_type_ids) + + return word,input_ids,input_type_ids,index + +#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 102] +#[101, 1045, 2572, 3153, 2006, 1996, 2754, 1012, 1045, 2018, 1037, 2200, 2204, 2835, 1012, 1996, 2377, 2001, 2200, 5875, 1012, 102] + + +# In[1555]: + + +import math +from pattern import en +from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE + + +''' + 功能: + 1.judge_and_suggestion系列函数,这个系列函数是在analyse之前做的一个预先判断处理,判断的是该位置原来词的相关词中有没有可以代替它的词 + 2.当相关词中有词的可能性和原词的可能性的差距大于阈值,则认为原词是错的,可以用相关词替换 + 3.替换词的gap还要经过后续的检查才能决定他是不是最好的推荐,这一步骤放在了show_abnormals里 + 输入: + prob:该位置可能性列表 + original:该位置原先的词 + list_word:该位置相关词表 + threhold:门槛,也就是阈值 + 输出: + judge:判断原来的词是否正确,0表示需要换词,1表示不需要换词或者说相关词里面没一个合适的 + suggestion:相关词中最好的推荐 + gap_with_totally_top:备选词中概率最高的和所有词中概率最高的之间的gap,可以换的词也有可能因为gap太大而遭到拒绝 +''' +def judge_and_suggestion(prob,original,list_word,threhold): + top_prob = 0 + list_word = list_word + [original] + original_prob = prob[tokenizer.vocab[original]] + best = None + suggestion = None + for word in list_word: + try: + word_id = tokenizer.vocab[word] + prob_word = prob[word_id] + if prob_word > top_prob: + top_prob = prob_word + best_word = word + except KeyError:#有的词enchant认为是正确的拼写,bert的词典里却没有,比如tiring,这种情况暂时没法解决,但是实际上bert不认的词会自动分词 + pass + + totally_top = prob.max().item() #最高的概率(不需要知道概率最大的词是哪一个) + gap_with_origin = math.log(top_prob) - math.log(original_prob) #备选词中最大概率和原来的词的概率的差 + gap_with_totally_top = math.log(totally_top) - math.log(top_prob) #所有词中最高的概率和备选词中最高的概率的差 + + if gap_with_origin > threhold: + suggestion = best_word + return 0,suggestion,gap_with_totally_top + else: + return 1,suggestion,gap_with_totally_top + + + +# In[1556]: + + +'''分析各种词性系列函数 + 功能:对第一遍检查得出的有问题的位置的单词,根据不同的词性进行不同步骤的分析 + 输入: + index:在原文中的错误位置 + prob:该位置可能性列表 + gap:原文该位置的词和概率最高的词之间的gap + top_word:概率最高的词 + threshold:免检查门槛 + threshold2:免修正门槛(勉强不算错) + threshold3:用推荐词替换的最低要求,大于该阈值才可以替换 + 输出: + suggestion:给出的修改建议,修改建议不局限于错误位置 + 说明: + 不仅局限于错误位置的分析是通过预添加或者去掉一个token,多进行一次model计算 +''' + + +# In[1557]: + + +import copy +import nltk +from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE + +def analyse_V(index,prob,gap,top_word,threshold,threshold2,threshold3): +#这是一个处理动词语法问题的函数,输入为问题词在text的token中的下标index + if gap < threshold: + return None + #******************************top_word暗示我应该是不定式************************** + if top_word in ["to","for"]: + wordV,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab['to']) + input_type_ids.append(0) + list_word = [conjugate(verb=wordV,tense=PRESENT,person = 1)] + suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) + if need == 1: + return 'to ' + suggestion + + #*****************************判断是不是时态或者拼写错误,又或者是其他词性******** + wordV = get_word(index) + #这三种是不涉及位置变化的检查,根据生成词表的速度从快到慢依次检查,之后也不需要再生成词表 + + list_V = lexeme(wordV) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_V,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_others = V_to_anything(conjugate(verb=wordV,tense=PRESENT,person = 1)) + 
judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_others,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_spell_correct = d.suggest(wordV) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordV,list_spell_correct,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + + front_word = get_word(index - 1) + behind_word = get_word(index + 1) + #**************************************判断是不是缺介词*************************** + list_IN = ["to","at","in","on","by","for","from","with","about","against","along","among","around","as","before","behind","below","beside","between","during","besides","into","near","over","through","under","without","after","above","of"] + if behind_word not in list_IN: + print("检查点") + wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids.insert(id_in_sen + 1,tokenizer.vocab['at'])#就随便插入一个东西,占位子 + input_type_ids.append(0) + suggestion_IN,need_IN,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_IN,2) + if need_IN == 1: + input_ids[id_in_sen + 1] = tokenizer.vocab[suggestion_IN] + list_word = list_V + suggestion_V,need,_,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,5) + if need == 1: + suggestion = suggestion_V + ' ' + suggestion_IN + return suggestion + + need_to_will = need_be = 0 + + #**************************************判断是不是不定式或者将来时*************************** + if front_word not in ["to","will"]: + wordV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids.insert(id_in_sen,tokenizer.vocab['to'])#就随便插入一个东西,占位子 + input_type_ids.append(0) + try: + input_ids[id_in_sen + 1] = tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,person = 1)] + suggestion_to_will,need_to_will,prob0,_ = give_suggestion(input_ids,input_type_ids,id_in_sen,["to","will"],1) + except KeyError: + need_to_will = 0 + #**********************************判断是不是被动语态或者进行时******************* + list_be = lexeme('be') + list_be = lexeme('be')[:8] #把否定去掉 + #********************是不是被动语态**************** + + wordV,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab['be'])#就随便插入一个东西,占位子 + input_type_ids.append(0) + try: + input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PAST,aspect=PROGRESSIVE)] + suggestion1,need_be1,prob1,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1) + except KeyError: + need_be1 = 0 + + #********************是不是现在分词**************** + try: + input_ids[index + 1]=tokenizer.vocab[conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)] + suggestion2,need_be2,prob2,_ = give_suggestion(input_ids,input_type_ids,index,list_be,1) + #print(tokenizer.convert_ids_to_tokens(input_ids)) + except KeyError: + need_be2 = 0 + + #***************************选择是不定式还是被动语态还是进行时**************************** + prob_max = 0 + if need_to_will == 1: + prob_max = max(prob_max,prob0) + if need_be1 == 1: + prob_max = max(prob_max,prob1) + if need_be2 == 1: + prob_max = max(prob_max,prob2) + + if need_to_will == 1 and prob_max == prob0: + need_be = 0 + if need_be1 == 1 and prob_max == prob1: + need_to_will = 0 + need_be = 1 + be_ = suggestion1 + if need_be2 == 1 and prob_max == prob2: + need_to_will = 0 + need_be = 1 + be_ = suggestion2 + #*************************************************处理各种语法****************************************************************** 
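+    # Apply whichever hypothesis won the probability comparison above: insert the
+    # chosen auxiliary ("to"/"will", or the selected form of "be") in front of the
+    # verb, query the masked LM once more at the shifted position, and return a
+    # two-word suggestion (the to/will branch always prefixes "to ", the "be"
+    # branch prefixes the chosen be-form); if no inflection of the original verb
+    # is acceptable there, fall back to the model's top word for the original slot.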
+ if need_to_will == 1: + wordV,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab[suggestion_to_will]) + input_type_ids.append(0) + list_word = [conjugate(verb=wordV,tense=PRESENT,person = 1),conjugate(verb=wordV,tense=PRESENT,aspect=PROGRESSIVE)] + suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) + if need == 1: + return 'to ' + suggestion + else: + return top_word + + elif need_be == 1: + #********************************被动语态或者进行时***************** + wordV,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab[be_]) + input_type_ids.append(0) + list_word = lexeme(wordV) + suggestion,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) + if need == 1: + return be_ + ' '+ suggestion + else: + return top_word + else: + return top_word + + return suggestion + + + +# In[1558]: + + +def analyse_adj(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + wordADJ = get_word(index) + #*****************************判断是不是时态或者拼写错误,又或者是其他词性******** + + list_word,list_word2 = build_like_word_adj(wordADJ) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADJ,list_word,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_spell_correct = d.suggest(wordADJ) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADJ,list_spell_correct,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + #list_word = list_word + list_spell_correct + front_word = get_word(index - 1) + behind_word = get_word(index + 1) + if front_word in ['more','most'] and len(list_word2) == 0: + #判断是不是比较级使用错误,如果该形容词比较级/最高级不需要加more/most,但是前面有more/most + wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + del input_ids[id_in_sen - 1] + del input_type_ids[0] + suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,min(threshold2, gap - threshold3)) + return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3 + + elif behind_word in ['##er','##r'] and len(list_word2) != 0: + #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est + wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids[id_in_sen] = tokenizer.vocab['more'] + suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_word,min(threshold2, gap - threshold3)) + return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'more' + ' ' + suggestion5 + + elif behind_word in ['##est','##st'] and len(list_word2) != 0: + #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est + wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids[id_in_sen] = tokenizer.vocab['most'] + suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen + 1,list_word,min(threshold2, gap - threshold3)) + return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'most' + ' ' + suggestion5 + + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + + if front_word not in ['this','that','these','those','more','most']:#检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级抑或是be动词 + wordADJ,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids.insert(id_in_sen,tokenizer.vocab["[MASK]"]) + input_type_ids.append(0) + list_front = 
['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] + suggestion,need_front,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_front,2) + if need_front == 1: + wordADJ,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab[suggestion]) + input_type_ids.append(0) + suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,min(threshold2, gap - threshold3)) + if need == 1: + return suggestion + ' ' + suggestion2 + else: + return top_word + + return top_word + + +# In[1600]: + + +def analyse_adv(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + + wordADV = get_word(index) + if wordADV in ['not']: + return None + #*****************************判断是不是时态或者拼写错误,又或者是其他词性******** + + list_word,list_word2 = build_like_word_adv(wordADV) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADV,list_word,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_spell_correct = d.suggest(wordADV) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordADV,list_spell_correct,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + + #list_word = list_word + list_spell_correct + if get_word(index - 1) in ['more','most'] and len(list_word2) == 0: + #判断是不是比较级使用错误,这个if语句处理:该形容词比较级/最高级不需要加more/most,但是前面有more/most + wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + del input_ids[id_in_sen - 1] + del input_type_ids[0] + suggestion3,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen - 1,list_word,5) + return '去掉前面 ' + get_word(index - 1)+ ' 原位置改成 ' + suggestion3 + + elif get_word(index + 1) in ['##er','##r'] and len(list_word2) != 0: + #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est + wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids[id_in_sen] = tokenizer.vocab['more'] + suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,5) + return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'more' + ' ' + suggestion5 + + elif get_word(index + 1) in ['##est','##st'] and len(list_word2) != 0: + #判断是不是比较级使用错误,如果该形容词比较级/最高级需要more/most,但是错写成形容词+er/est + wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids[id_in_sen] = tokenizer.vocab['most'] + suggestion5,need_adj,prob,_ = give_suggestion(input_ids,input_type_ids,id_in_sen+1,list_word,5) + return '去掉后面 '+ get_word(index + 1) + ' 原位置改成 '+ 'most' + ' ' + suggestion5 + + else: + #检查形容词前面是否需要加冠词或者是需要more,most的比较级,最高级,be动词 + wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids.insert(id_in_sen,tokenizer.vocab["[MASK]"]) + input_type_ids.append(0) + list_front = ['the','a','an','this','that','these','those','some','any','all','more','most','am','is','are','was','were'] + suggestion,need_front,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_front,2) + if need_front == 1: + wordADV,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab[suggestion]) + input_type_ids.append(0) + #print(tokenizer.convert_ids_to_tokens(input_ids)) + suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_word,5) + if need == 1: + return suggestion + ' 
' + suggestion2 + else: + return top_word + else: + wordADV,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + input_ids.insert(id_in_sen + 1,tokenizer.vocab[","]) + input_type_ids.append(0) + suggestion3,need_douhao,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_word,2) + if need_douhao == 1: + return suggestion3 + ' ,' + else: + return top_word + + +# In[1536]: + + +from pattern.en import article,referenced,pluralize, singularize +import nltk +def analyse_N(index,prob,gap,top_word,threshold,threshold2,threshold3): + #这是一个处理名词语法问题的函数,输入为问题词在text的token中的下标index + if gap < threshold: + return None + + wordN = get_word(index) + #*****************************判断是不是时态或者拼写错误,又或者是其他词性******** + word_tag = nltk.pos_tag([wordN]) + if word_tag[0][1] == "NN": + N_ = wordN + N_s= pluralize(wordN) + else: + N_ = singularize(wordN) + N_s= wordN + list_N = [N_,N_s] + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_N,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_others = N_to_anything(N_) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_others,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_spell_correct = d.suggest(wordN) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordN,list_spell_correct,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + #*********************************************************************************************************************************** + need_DT = 0 #表示是否需要在前面加冠词 + wordN,input_ids,input_type_ids,id_in_sen = pre_training_input_in_sentence(index) + + #*****************************************判断是否需要冠词或介词************************************************************************ + list_DT = ['the','a','an'] + front_word = get_word(index - 1) + if front_word in list_DT:#如果前一个词就是冠词,那么一定不需要再往前面加介词或冠词 + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return top_word + + input_ids.insert(id_in_sen,tokenizer.vocab["[MASK]"]) + input_type_ids.append(0) + list_IN = ["of",'to',"at","in","on","by","for","from","with","about","against","along","among","around","as","before","behind","below","beside","between","during","besides","into","near","over","through","under","without","after","above"] + list_DT_IN = list_DT + list_IN + suggestion,need_DT_IN,_,_= give_suggestion(input_ids,input_type_ids,id_in_sen,list_DT_IN,2) + if need_DT_IN == 0:#不需要冠词或介词 + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return top_word + + elif need_DT_IN == 1:#需要冠词或介词 + wordN,input_ids,input_type_ids,index = pre_training_input_entire(index) + input_ids.insert(index,tokenizer.vocab[suggestion]) + input_type_ids.append(0) + suggestion2,need,_,_= give_suggestion(input_ids,input_type_ids,index + 1,list_N ,min(9.5,gap - threshold3)) + if need == 1: + return suggestion + ' ' + suggestion2 + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return top_word + + +# In[1537]: + + +''' + 这是一个相关代词的词典,容易混淆的词放在一个列表中 + +''' +like_he = ['he','his','him','himself','who', 'whom', 'whose'] +like_she = ['she','her','herself','hers','who', 'whom', 'whose'] +like_it = ['it','its','itself','who', 'whom', 'whose'] +like_i = ['i','me','my','myself','mine'] +like_you = ['you','your','yourself','yourselves'] +like_we = ['we','us','our','ours','ourselves'] +like_they = ['they','them','their','theirs'] + +like_this = 
['this', 'these'] +like_that = ['that','those'] +pronoun_Question = ['who', 'whom', 'whose', 'which', 'what', 'whoever', 'whichever', 'whatever'] #疑问代词 +pronoun_relation = ['that', 'which', 'who', 'whom', 'whose', 'as'] #关系代词 +like_some = ['some','any'] +like_few = ['few','little'] +like_many = ['many','much'] +like_other = ['another','other'] + +pronoun = [like_he,like_she,like_it,like_i,like_you,like_we,like_they,like_this,like_that,pronoun_Question,pronoun_relation,like_some,like_few,like_many,like_other] +pronoun_dictionary = {} +pronoun_list = [] +for list_word in pronoun: + pronoun_list = pronoun_list + list_word + for word in list_word: + pronoun_dictionary.update({word:list_word}) + + +# In[1538]: + + +def analyse_pronoun(index,prob,gap,top_word,threshold,threshold2,threshold3): + #这是一个处理代词语法问题的函数,输入为问题词在text的token中的下标index + if gap < threshold: + return None + + wordPROP = get_word(index) + #*****************************判断是不是时态或者拼写错误,又或者是其他代词******** + try: + list_PROP = pronoun_dictionary[wordPROP] + except: + list_PROP = [] + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordPROP,list_PROP,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordPROP,pronoun_list,threshold3)#在所有代词里面选择 + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + else: + return None + + +# In[1613]: + + +def analyse_DT(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + + wordDT = get_word(index) + if wordDT in ["every",'per','each','no']:#有实际意义,不做修改 + return None + + if wordDT in ['some']: + list_word = ['some','any','a','an'] + elif wordDT in ['any']: + list_word = ['some','any',"every",'per','each'] + elif wordDT in ['this','that','these','those']: + list_word = ['this','that','these','those'] + elif wordDT in ['the','a','an']: + list_word = ['the','a','an','some','any'] + elif wordDT in ['another','other']: + list_word = ['another','other'] + elif wordDT in ['all','both']: + list_word = ['all','both'] + else: + list_word = [wordDT] + + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordDT,list_word,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + + elif top_word in ["at","in","on","by","for","from","with","about","against","along","among","around","as","before","behind","below","beside","between","during","besides","into","near","over","through","under","without","after","above","of",'to']: + return top_word + ' ' + wordDT + else: + if top_word in ['some','any','this','that','these','those','the','a','an']: + return top_word + elif wordDT in ['another','other','all','both']: + return None + else: + return "去掉 " + wordDT +# In[1614]: + + +def analyse_IN(index,prob,gap,top_word,threshold,threshold2,threshold3): + #检查介词,确认需不需要删掉或者换介词 + if gap < threshold: + return None + + wordIN = get_word(index) + if wordIN in ['before',"after","above","below","underneath","beneath","without"]:#有实际意义,不做修改 + return None + + list_word = ["at","in","on","by","for","from","with","about","against","along","among","around","as","before","behind","below","beside","between","during","besides","into","near","over","through","under","without","after","above","of",'to'] + + judge,suggestion,gap_with_totally_top = 
judge_and_suggestion(prob,wordIN,list_word,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + list_spell_correct = d.suggest(wordIN) + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordIN,list_spell_correct,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + elif top_word in u',.!?[]()<>"\'': + return top_word + else: + return "去掉 " + wordIN +#print(analyse_IN(76)) + + +# In[1615]: + + +def analyse_CC(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + + wordCC = get_word(index) + list_CC = ["but","because","yet","still","however","although","so","thus","and","or","too","either","or","neither","nor","when","while","as","whenever","since","until","till",","] + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordCC,list_CC,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return None + + +# In[1616]: + + +def analyse_MD(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + + wordMD = get_word(index) + if wordMD in ['can','could']: + list_MD = ['can','could'] + elif wordMD in ['may','might']: + list_MD = ['may','might'] + elif wordMD in ['shall','should']: + list_MD = ['shall','should'] + elif wordMD in ['will','would']: + list_MD = ['will','would'] + elif wordMD in ['dare','dared']: + list_MD = ['dare','dared'] + else: + list_MD = [wordMD] + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,wordMD,list_MD,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return None + + +# In[1617]: + + +def analyse_biaodian(index,prob,gap,top_word,threshold,threshold2,threshold3): + if gap < threshold: + return None + + biaodian = get_word(index) + biaodian_list = ['.',',',';','!','?','"',"'",',','。','’','‘','“','”','and','but'] + judge,suggestion,gap_with_totally_top = judge_and_suggestion(prob,biaodian,biaodian_list,threshold3) + if judge==0 and gap_with_totally_top < threshold2: + return suggestion + + if gap < threshold2:#没有可以替换的词,而且原本该位置的词就勉强符合要求 + return None + else: + return None + + +# In[1618]: + + +''' + 功能: + 这是几个和拼写检查相关函数 + correct_spelling:用于发现text中拼写错误,写成不存在的词的情况,并暂时把它改成存在的词,这样再放入模型训练,完成之后的步骤 + token_Align:展示拼写错误时需要将原来错误的词显示出来,由于BERT的tokenize会把错误的词分段,造成未知序号的混乱,因而需要将原来的token和被correct的token位置对齐 + 这两个函数需要配合使用 +''' +import enchant +import re +d = enchant.Dict("en_US") +from pattern.en import suggest + +def C_trans_to_E(string): #标点符号转换函数 + E_pun = u',.!?[]()<>"\'"\'.:;' + C_pun = u',。!?【】()《》“‘”’.:' + table= {ord(f):ord(t) for f,t in zip(C_pun,E_pun)} + return string.translate(table) + +def process_biaodian(text):#把标点和字母分开,使得用split分词能把标点分成单独的token,顺便把中文标点变成英文标点 + text1 = '' + for character in text[0]: + if character in u',.!?[]()<>"\':-;,。!?【】()《》“‘”’.%': + character1 = C_trans_to_E(character) + text1 = text1 + ' '+character1+' ' + else: + text1 = text1 + character + return [text1] + +def correct_spelling(text): + #text:原本可能带有拼写错误的文本 + #返回[correct_text]:不带拼写错误的文本,外面套上中括号,保持列表的形式 + global suggestions + correct_text = '' + text0 = text + text1 = '' + + tokens = text.split(' ') + for token in tokens: #给拼写错误的单词标上‘错’ + if token not in ['.',',',';','!','?','"',"'",',','。','’','‘','“','”',"\r\n",""]: + if 
d.check(token)==False and token != suggest(token)[0][0]: + word = '不' + suggest(token)[0][0] #pattern的suggestion + else: + word = token + elif token == "\r\n": + word = '换' + else: + word = token + correct_text = correct_text + ' ' + word + tokens = tokenizer.tokenize(correct_text) + length = len(tokens) + correct_text = "" + i = 0 + while(i < length): + + if tokens[i] == '不':#中文乱码 + suggestions.update({i+1:tokens[i+1]})#给外部变量suggestions添加错误 + del tokens[i] + length = length - 1 + elif tokens[i][0:2] == '##': + word = tokens[i][2:] + correct_text = correct_text + word + i = i+1 + else: + token = tokens[i] + if token not in ["'"]: + word = ' '+ token + else: + word = token + + correct_text = correct_text + word + i = i+1 + return [correct_text] + + +def token_Align(tokens,text): + #tokens是拼写修正之后的文本的分词结果 + #text是原本可能带有拼写错误的文本 + #返回的是text的分词结果 + original_tokens = tokenizer.tokenize(text) + original_tokens = ['[CLS]'] + original_tokens + ['[SEP]'] + print(original_tokens) + length = len(tokens) + i = 0 + while(i < min(length - 1,len(original_tokens) - 1)): + tokens_length = min(length - 1,len(original_tokens) - 1) + if original_tokens[i] == tokens[i] or (i+1 threshold1) or (token == '\r\n' and count_tokens > threshold2): + texts.append([text]) + text = '' + count_tokens = 0 + if count_tokens > 0: + texts.append([text]) + return texts + +# In[1619]: + + +import nltk +from pattern.en import conjugate, lemma, lexeme,PRESENT,SG +''' + 这是一个输出BERT模型训练结果的函数,方便查看调试 +''' +def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): #输出结果的函数,要最高概率topk个输出 + def print_pair(token, prob, end_str='', hit_mark=' '): + if i < firstk: + # token = token.replace('', '').replace('\n', '/n') + print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str) + + ret = None + for i in range(len(tokens)): + ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]] + prob_ = probs[i][ind_].item() #这个probs是该字符串第i个位置上填上词典上各个词的概率,prob_是词典上原来天的这个词的概率 + print_pair(tokens[i], prob_, end_str='\t') + values, indices = probs[i].topk(topk) + #print(values, indices) + #print("****************************************************************************************************") + top_pairs = [] + for j in range(topk): + ind, prob = indices[j].item(), values[j].item() + hit_mark = '*' if ind == ind_ else ' ' + token = tokenizer.ids_to_tokens[ind] + print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\n') + top_pairs.append((token, prob)) + if tokens[i] == "[MASK]": + ret = top_pairs + return ret + + +# In[1621]: + + +def analyse_prob(prob,token): + ind_ = tokenizer.vocab[token] + prob_ = prob[ind_].item() + top_prob = prob.max().item() + top_ind = prob.argmax().item() + top_word = tokenizer.ids_to_tokens[top_ind] #可能性最高的词 + gap = math.log(top_prob) - math.log(prob_) #计算两个词之间的差距 + return top_word,gap + + +# In[1622]: + + +import colored +from colored import stylize +import spacy +nlp = spacy.load('en') +from nltk.corpus import wordnet as wn + +def analyse_词性(token,tag): + if 'VB' in tag: #如果是动词的各种时态 + tag0 = "v" + elif "JJ" in tag : #形容词 + tag0 = "a" + elif "RB" in tag: #副词 + tag0 = "r" + elif "NN" in tag: #名词 + tag0 = "n" + else: + return tag + if wn.morphy(token, tag0)==None: + nlp = spacy.load('en') + doc = nlp(token) + tag = doc[0].tag_ + return tag + +def show_abnormals(tokens,probs,text,show_suggestions=False): #多加了一个参数text,用来生成原来的token的 + global suggestions + global original_tokens + original_tokens = token_Align(tokens,text) + def gap2color(mode): + 
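+        # `gap`, computed by analyse_prob above, is log(p_top) - log(p_original):
+        # how much more probable (in natural-log units) BERT finds its favourite
+        # word for a slot than the word actually written. The analyse_* helpers
+        # compare this gap against hand-tuned thresholds, and `mode` records the
+        # outcome for display:
+        #   mode 1 -> token kept as-is (white when gap == 0, yellow otherwise)
+        #   mode 2 -> a replacement is suggested (orange; the suggestion itself is
+        #             printed in green when gap > 10, cyan otherwise)
+        #   any other value falls through to red, though show_abnormals below only
+        #   ever sets 1 or 2.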
if mode == 1: + return 'yellow_1' + elif mode == 2: + return 'orange_1' + else: + return 'red_1' + + def print_token(token, suggestion, gap ,mode): + if gap == 0 and mode == 1: + print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='') + else: + print(stylize(token, colored.fg(gap2color(mode)) + colored.bg('black')), end='') + if show_suggestions and mode > 1: + print(stylize('/' + str(suggestion) + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='') + else: + print(stylize(' ', colored.fg(gap2color(mode)) + colored.bg('black')), end='') + + + avg_gap = 0. + tokens_tag = nltk.pos_tag(tokens) #给整个text做词性标注 + for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP] + if tokens[i]=='[UNK]': + continue + top_word,gap = analyse_prob(probs[i],tokens[i]) + print() + print("*******************************************************************************************************************") + print(i) + print(gap) + avg_gap += gap + suggestion = None + #doc = nlp(tokens[i]) #用spacy标记 + #tag = doc[0].tag_ + #tag = nltk.pos_tag([tokens[i]])[0][1] #直接对token标记 + tag = tokens_tag[i][1]#当前tokens的词性,上面是用不同的方法标注词性 + tag = analyse_词性(tokens[i],tag) + print(tag) + + if 'VB' in tag: #如果是动词的各种时态 + suggestion = analyse_V(i,probs[i],gap,top_word,2.5 ,7.9 ,1.8) + + elif "DT" == tag: #如果是冠词(冠词原则上不改变词性) + suggestion = analyse_DT(i,probs[i],gap,top_word,3 ,4 ,1) + + elif "JJ" in tag : #形容词 + suggestion = analyse_adj(i,probs[i],gap,top_word,5 ,8 ,2) + + elif "RB" in tag: #副词 + suggestion = analyse_adv(i,probs[i],gap,top_word,5 ,8 ,2) + + elif "PRP" in tag: #代词 + suggestion = analyse_pronoun(i,probs[i],gap,top_word,3 ,5 ,1.5) + + elif "NN" in tag: #名词 + suggestion = analyse_N(i,probs[i],gap,top_word,4 ,10 ,2.2) + + elif "CC" in tag: #连词 + suggestion = analyse_CC(i,probs[i],gap,top_word,2 ,2.5 ,1.5) + + elif "IN" == tag or 'TO' == tag: #介词 + suggestion = analyse_IN(i,probs[i],gap,top_word,3.5 ,4 ,1.5) + + elif 'MD' in tag: #情态动词 + suggestion = analyse_MD(i,probs[i],gap,top_word,3 ,4 ,1.5) + + elif "CD" in tag: #数词直接pass + pass + + elif "WDT" == tag and gap > 3.5: #who,which,that那些 + suggestion = top_word #推荐的词一般比较准 + + elif tokens[i] in u',.!?[]()<>"\':,。!?【】()《》“‘”’.': + suggestion = analyse_biaodian(i,probs[i],gap,top_word,1.3 ,2 ,1) + + elif gap > 5: + suggestion = top_word + + if (suggestion != None and suggestion.lower() != tokens[i] and suggestion.lower() != original_tokens[i]): #修改存在并且是另外一个词 + suggestions.update({i:suggestion}) + mode = 2 + elif suggestions.__contains__(i)==True: #这是因为之前在拼写检查时已经修改了该位置的单词 + if original_tokens[i] == tokens[i]: + del suggestions[i] + mode = 1 + else: + mode = 2 + suggestion = suggestions[i] + else: + if original_tokens[i] != tokens[i]: + mode = 2 + suggestions[i] = tokens[i] + suggestion = tokens[i] + else: + mode = 1 + + print_token(original_tokens[i], suggestion, gap, mode) + print() + print(original_tokens[i],tokens[i],suggestion,mode) + avg_gap /= (len(tokens) - 2) + print() + print('平均gap:'+ str(avg_gap)) + return avg_gap + +def analyze_part_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=500): + print("原始文本") + print(text) + step = 15 #用于训练加速的步长,每15个token被mask一个位置 + global input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids,suggestions,original_tokens + suggestions = {}#清空全局变量 + text = process_biaodian(text) + print("标点处理后") + print(text) + text0 = text #保存有拼写错误的文本 + text = correct_spelling(text[0]) #拼写修正过得文本 + print("拼写修正后********************************") + 
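+    # At this point `text0` still holds the user's original (possibly misspelled)
+    # text while `text` is the spell-corrected copy; `text` is what gets fed to
+    # BERT below, and `text0[0]` is later passed to show_abnormals so token_Align
+    # can map suggestions back onto the original tokens.
+    # Judging from the reduction loop further down, copy_and_mask_feature(..., step)
+    # appears to build one masked copy per offset, copy i masking positions
+    # i, i+step, i+2*step, ...; every token is then predicted once while only
+    # `step` (= 15 here) forward passes are needed instead of one per token.
+    # Rough illustration (hypothetical 40-token chunk, step == 15): copy 3 would
+    # mask positions 3, 18 and 33, and its output row is the only one consulted
+    # for those three positions when reduced_mlm_probs is assembled below.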
print(text) + print("********************************") + #黄金搭档token_Align放在show_abnormals里面了 + input_ids_sen,input_type_ids_sen,in_sentence,sentences,entire_ids,entire_type_ids = process_text(text[0]) + + examples = convert_text_to_examples(text) + features = convert_examples_to_features(examples, tokenizer, print_info=False) + given_mask = "[MASK]" in features[0].tokens + if not given_mask or masked_tokens is not None: + assert len(features) == 1 + features, batches = copy_and_mask_feature(features[0],step, masked_tokens=masked_tokens) + #print(len(features)) + + input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) #把input_ids增加了一个维度,变成[n_features,sequence_len] + #这里的n_features实际上是句子有多少批训练 + + input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) #把input_type_ids增加了一个维度,其实每一行都一样 + input_ids = input_ids.to(device) + input_type_ids = input_type_ids.to(device) + + mlm_logits = model(input_ids) + mlm_probs = F.softmax(mlm_logits, dim=-1) + tokens = features[0].tokens #为了输出,[mask]在input_ids里面表示出来,features的token都一样 + print(tokens) + if not given_mask or masked_tokens is not None: + bsz, seq_len, vocab_size = mlm_probs.size() #三个维度分别是batch_size, sequence_length, vocab_size + assert bsz == len(batches) + reduced_mlm_probs = torch.Tensor(1, len(tokens), vocab_size) + for i in batches: + pos = i + while pos < len(tokens): + reduced_mlm_probs[0, pos] = mlm_probs[i, pos] + pos = pos + step + mlm_probs = reduced_mlm_probs #压缩一下大小,节约不必要浪费的空间(只需要第i个batch里面[mask]位置的词汇表概率即可) + top_pairs = show_lm_probs(tokens, None, mlm_probs[0], firstk=show_firstk_probs) #传入的probs是二维的 + if not given_mask: + avg_gap = show_abnormals(tokens,mlm_probs[0],text0[0], show_suggestions=show_suggestions) + return suggestions,original_tokens,avg_gap + + +def analyze_text(text, masked_tokens=None, show_suggestions=True, show_firstk_probs=500): + suggestions = {} + avg_gap = 0 + new_part_suggestions = {} + original_tokens = ['[CLS]','[SEP]'] + text = process_biaodian(text) + text0 = text #保存有拼写错误的文本 + texts = split_text(text,130,100) + accumulate_length = 0 + remainer = 2 #[CLS]和[SEP] + for text0 in texts: + part_suggestions,part_original_tokens,part_avg_gap = analyze_part_text(text0, masked_tokens, show_suggestions, show_firstk_probs) + for key in part_suggestions: + new_part_suggestions[key + accumulate_length] = part_suggestions[key] + tokens_length = len(part_original_tokens) + accumulate_length = accumulate_length + tokens_length - remainer + suggestions.update(new_part_suggestions) + original_tokens = original_tokens[:-1] + part_original_tokens[1:] + avg_gap = avg_gap + part_avg_gap*(tokens_length - 2) + avg_gap = avg_gap/(accumulate_length) + return suggestions,original_tokens,avg_gap +# In[1626]: + + + +''' + 功能:对suggestions进行修改,由于某处位置改变造成suggestions后面的错误位置都相应移动 + 输入: + index:开始移动的位置 + direction:移动的方向,1表示向右边移,-1表示向左边移 +''' +def modify_suggestions(index,direction): + global suggestions + new_suggestions = {}; + if direction == 0: + pass + elif direction == 1: + for key in suggestions: + if key < index: + new_suggestions.update({key:suggestions[key]}) + else: + new_suggestions.update({key+1:suggestions[key]}) + elif direction == -1: + for key in suggestions: + if key < index: + new_suggestions.update({key:suggestions[key]}) + else: + new_suggestions.update({key-1:suggestions[key]}) + suggestions = new_suggestions + + +# In[1592]: + + +#print(suggestions) +def display_suggestion(): + 
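+    # Debug helper: pretty-prints the global `suggestions` dict as a two-column
+    # table, suggestion text on the left and the token position it applies to on
+    # the right. Keys are indices into `original_tokens`; values are either a
+    # plain replacement ("went", "a dog") or one of the instruction strings that
+    # modify_text() below parses: "去掉 X" (delete X), "去掉前面 X 原位置改成 Y"
+    # (delete the preceding X, change this slot to Y), "去掉后面 X 原位置改成 Y"
+    # (delete the following X, change this slot to Y). The quoted examples are
+    # illustrative, not taken from a real run.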
print("**********************************display_suggestions********************************************************") + print("| {:50} : {}".format("suggestion","position in text")) + print("---------------------------------------------------------------------------------------") + for key in suggestions: + print("| {:<50} : {}".format(suggestions[key] ,key)) + print("*************************************************************************************************************") +#display_suggestion() + +''' + 功能: + 修改文本,tokens,suggestions + 输入: + index:修改的位置 + text:被修改前的原文 + 输出: + [text]:修改后的文本 + new_tokens:修改后的新tokens + suggestions:修改后新的建议字典 +''' +def modify_text(index,text): #修改文本,tokens,以及suggestions + global suggestions,original_tokens + tokens = original_tokens + new_text = "" + suggestion = suggestions[index] + del(suggestions[index]) + suggestion_tokens = suggestion.split(" ") + #print(suggestion_tokens) + if '去掉前面' == suggestion_tokens[0]: + del tokens[index - 1] + del suggestion_tokens[0] + del suggestion_tokens[0] + modify_suggestions(index,-1) + index = index - 1 + elif '去掉后面' == suggestion_tokens[0]: + del tokens[index + 1] + del suggestion_tokens[0] + del suggestion_tokens[0] + modify_suggestions(index+2,-1) + elif '去掉' == suggestion_tokens[0]: + del tokens[index] + del suggestion_tokens[0] + del suggestion_tokens[0] + modify_suggestions(index+1,-1) + if '原位置改成' in suggestion_tokens: + del suggestion_tokens[0] + + + len_suggest = len(suggestion_tokens) + if len_suggest == 1: + tokens[index] = suggestion_tokens[0] + elif len_suggest == 2: + tokens.insert(index,suggestion_tokens[0]) + tokens[index + 1] = suggestion_tokens[1] + modify_suggestions(index+1,1) + final_len = len(tokens) + + for i in range(1,len(tokens)-1): + word = tokens[i] + if word[0:2] == "##": + new_text = new_text + word[2:] + else: + new_text = new_text + ' ' + word + + original_tokens = tokens + return [text],tokens,suggestions + + +# In[1576]: + + +#变成py文件 +try: + get_ipython().system('jupyter nbconvert --to python likunlin_final.ipynb') +except: + pass + diff --git "a/likunlin_\350\215\211\347\250\277.ipynb" "b/likunlin_\350\215\211\347\250\277.ipynb" new file mode 100644 index 00000000000000..356bcac377cc68 --- /dev/null +++ "b/likunlin_\350\215\211\347\250\277.ipynb" @@ -0,0 +1,1216 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "05/14/2019 17:59:16 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['dr', '##ae', '##m']\n" + ] + }, + { + "ename": "AssertionError", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAssertionError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 19\u001b[0m \u001b[0;31m# Mask a token that we will try to predict back with `BertForMaskedLM`\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0mmasked_index\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;36m8\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 21\u001b[0;31m \u001b[0;32massert\u001b[0m \u001b[0mtokenized_text\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m'[CLS]'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'who'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'was'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'jim'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'henson'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'?'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'[SEP]'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'jim'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'[MASK]'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'was'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'a'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'puppet'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'##eer'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'[SEP]'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 22\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 23\u001b[0m \u001b[0;31m# Convert token to vocabulary indices\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAssertionError\u001b[0m: " + ] + } + ], + "source": [ + "import torch\n", + "from pytorch_pretrained_bert import BertTokenizer, BertModel, BertForMaskedLM\n", + "\n", + "# OPTIONAL: if you want to have more information on what's happening, activate the logger as follows\n", + "import logging\n", + "\n", + "logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s',\n", + " datefmt = '%m/%d/%Y %H:%M:%S',\n", + " level = logging.INFO)\n", + "logger = logging.getLogger(__name__)\n", + "\n", + "# Load pre-trained model tokenizer (vocabulary)\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')\n", + "\n", + "# Tokenized input\n", + "text = \"draem\"\n", + "tokenized_text = tokenizer.tokenize(text)\n", + "print(tokenized_text)\n", + "# Mask a token that we will try to predict back with `BertForMaskedLM`\n", + "masked_index = 8\n", + "assert tokenized_text == ['[CLS]', 'who', 'was', 'jim', 'henson', '?', '[SEP]', 'jim', '[MASK]', 'was', 'a', 'puppet', '##eer', '[SEP]']\n", + "\n", + "# Convert token to vocabulary indices\n", + "indexed_tokens = tokenizer.convert_tokens_to_ids(tokenized_text)\n", + "# Define sentence A and B indices associated to 1st and 2nd sentences (see paper)\n", + "segments_ids = [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1]\n", + "\n", + "# Convert inputs to PyTorch tensors\n", + "tokens_tensor = torch.tensor([indexed_tokens])\n", + "segments_tensors = torch.tensor([segments_ids])" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('tall', 'NN')]\n" + ] + } + ], + "source": [ + "import nltk\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG\n", + "words = nltk.word_tokenize(\"I don't like the flower.\")\n", + "word_tag = nltk.pos_tag(['tall'])\n", + "print(word_tag)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "nltk.word_tokenize(text):对指定的句子进行分词,返回单词列表\n", + "\n", + "nltk.pos_tag(words):对指定的单词列表进行词性标记,返回标记列表\n", + "\n", + "CC coordinating conjunction\n", + "CD cardinal digit\n", + "DT determiner\n", + "EX existential there (like: \"there is\" ... 
think of it like \"there exists\")\n", + "FW foreign word\n", + "IN preposition/subordinating conjunction\n", + "JJ adjective 'big'\n", + "JJR adjective, comparative 'bigger'\n", + "JJS adjective, superlative 'biggest'\n", + "LS list marker 1)\n", + "MD modal could, will\n", + "NN noun, singular 'desk'\n", + "NNS noun plural 'desks'\n", + "NNP proper noun, singular 'Harrison'\n", + "NNPS proper noun, plural 'Americans'\n", + "PDT predeterminer 'all the kids'\n", + "POS possessive ending parent's\n", + "PRP personal pronoun I, he, she\n", + "PRP$ possessive pronoun my, his, hers\n", + "RB adverb very, silently,\n", + "RBR adverb, comparative better\n", + "RBS adverb, superlative best\n", + "RP particle give up\n", + "TO to go 'to' the store.\n", + "UH interjection errrrrrrrm\n", + "VB verb, base form take\n", + "VBD verb, past tense took\n", + "VBG verb, gerund/present participle taking\n", + "VBN verb, past participle taken\n", + "VBP verb, sing. present, non-3d take\n", + "VBZ verb, 3rd person sing. present takes\n", + "WDT wh-determiner which\n", + "WP wh-pronoun who, what\n", + "WP$ possessive wh-pronoun whose\n", + "WRB wh-abverb where, when" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 248, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "my\n", + "cactus\n", + "good\n", + "rock\n", + "python\n", + "friendly\n", + "best\n", + "run\n", + "run\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[nltk_data] Downloading package wordnet to /home/xd/nltk_data...\n", + "[nltk_data] Package wordnet is already up-to-date!\n" + ] + } + ], + "source": [ + "import nltk\n", + "from nltk.stem import WordNetLemmatizer\n", + "nltk.download('wordnet')\n", + "lemmatizer = WordNetLemmatizer()\n", + "\n", + "print(lemmatizer.lemmatize(\"my\"))\n", + "print(lemmatizer.lemmatize(\"cacti\"))\n", + "print(lemmatizer.lemmatize(\"better\",pos=\"a\"))#pos只能是a,v,r,n\n", + "print(lemmatizer.lemmatize(\"rocks\"))\n", + "print(lemmatizer.lemmatize(\"python\"))\n", + "print(lemmatizer.lemmatize(\"friendly\", pos=\"n\"))\n", + "print(lemmatizer.lemmatize(\"best\", pos=\"a\"))\n", + "print(lemmatizer.lemmatize(\"run\"))\n", + "print(lemmatizer.lemmatize(\"run\",'a'))" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " taller | JJR \n" + ] + } + ], + "source": [ + "import spacy\n", + "nlp = spacy.load('en')\n", + "doc = nlp(\"taller\")\n", + "for i in range(0,len(doc)):\n", + " print('{: >10} | {: <10}'.format(doc[i].text, doc[i].tag_,))" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['apple', 'apples', 'appling', 'appled']\n", + "putts\n" + ] + } + ], + "source": [ + "from pattern import en\n", + "from pattern.en import conjugate, lemma, lexeme,PRESENT,SG,INFINITIVE, PRESENT, PAST, FUTURE, PROGRESSIVE \n", + "#print (lemma('better','a'))\n", + "list0 = lexeme('apples')\n", + "\n", + "print(list0)\n", + "#print (lexeme('had'))\n", + "word = \"give\"\n", + "#print( conjugate('purred', '3sg'))\n", + "print (conjugate(verb='putting',tense=PRESENT,person = 3))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Verb conjugation\n", + "The 
pattern.en module has a lexicon of 8,500 common English verbs and their conjugated forms (infinitive, 3rd singular present, present participle, past and past participle – verbs such as be may have more forms). Some verbs can also be negated, including be, can, do, will, must, have, may, need, dare, ought.\n", + "\n", + "conjugate(verb, \n", + " tense = PRESENT, # INFINITIVE, PRESENT, PAST, FUTURE\n", + " person = 3, # 1, 2, 3 or None\n", + " number = SINGULAR, # SG, PL\n", + " mood = INDICATIVE, # INDICATIVE, IMPERATIVE, CONDITIONAL, SUBJUNCTIVE\n", + " aspect = IMPERFECTIVE, # IMPERFECTIVE, PERFECTIVE, PROGRESSIVE \n", + " negated = False, # True or False\n", + " parse = True)\n", + "lemma(verb) # Base form, e.g., are => be.\n", + "lexeme(verb) # List of possible forms: be => is, was, ...\n", + "tenses(verb) # List of possible tenses of the given form.\n", + "The conjugate() function takes the following optional parameters:\n", + "\n", + "Tense\tPerson\tNumber\tMood\tAspect\tAlias\tTag\tExample\n", + "INFINITIVE\tNone\tNone\tNone\tNone\t\"inf\"\tVB\tbe\n", + "PRESENT\t1\tSG\tINDICATIVE\tIMPERFECTIVE\t\"1sg\"\tVBP\tI am\n", + "PRESENT\t2\tSG\tINDICATIVE\tIMPERFECTIVE\t\"2sg\"\t ·\tyou are\n", + "PRESENT\t3\tSG\tINDICATIVE\tIMPERFECTIVE\t\"3sg\"\tVBZ\the is\n", + "PRESENT\tNone\tPL\tINDICATIVE\tIMPERFECTIVE\t\"pl\"\t ·\tare\n", + "PRESENT\tNone\tNone\tINDICATIVE\tPROGRESSIVE\t\"part\"\tVBG\tbeing\n", + " \n", + "PAST\tNone\tNone\tNone\tNone\t\"p\"\tVBD\twere\n", + "PAST\t1\tPL\tINDICATIVE\tIMPERFECTIVE\t\"1sgp\"\t ·\tI was\n", + "PAST\t2\tPL\tINDICATIVE\tIMPERFECTIVE\t\"2sgp\"\t ·\tyou were\n", + "PAST\t3\tPL\tINDICATIVE\tIMPERFECTIVE\t\"3gp\"\t ·\the was\n", + "PAST\tNone\tPL\tINDICATIVE\tIMPERFECTIVE\t\"ppl\"\t ·\twere\n", + "PAST\tNone\tNone\tINDICATIVE\tPROGRESSIVE\t\"ppart\"\tVBN\tbeen" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[1, 2, 3], [4, 5, 6]]\n" + ] + } + ], + "source": [ + "import torch\n", + "a = [[1,2,3],[4,5,6]]\n", + "\n", + "torch.tensor(a)\n", + "print(a)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "article(word, function=INDEFINITE) # DEFINITE | INDEFINITE,限定性冠词the或者非限定性冠词a/an\n", + "referenced(word, article=INDEFINITE) # Returns article + word. 
返回冠词 + word\n", + "pluralize(word, pos=NOUN, custom={}, classical=True)\n", + "singularize(word, pos=NOUN, custom={})" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "a university\n", + "an\n", + "suppers\n", + "supper\n" + ] + } + ], + "source": [ + "from pattern.en import article,referenced,pluralize, singularize\n", + "print(referenced('university'))\n", + "print(article('hour'))\n", + "\n", + "print(pluralize('supper'))\n", + "print(singularize('supper'))" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "ename": "AttributeError", + "evalue": "module 'pattern.en' has no attribute 'adjective'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mpattern\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0men\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;31m#print( en.is_number(12))\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0men\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0madjective\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mis_emotion\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"anxious\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mboolean\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m: module 'pattern.en' has no attribute 'adjective'" + ] + } + ], + "source": [ + "from pattern import en\n", + "#print( en.is_number(12))\n", + "print(en.adjective.is_emotion(\"anxious\", boolean=False))" + ] + }, + { + "cell_type": "code", + "execution_count": 382, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'path_similarity' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mwordnet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msynsets\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'basil'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 7\u001b[0;31m \u001b[0mt\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpath_similarity\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'basic'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'base'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 8\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m \u001b[0mwordnet\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msimilarity\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0ma\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + 
"\u001b[0;31mNameError\u001b[0m: name 'path_similarity' is not defined" + ] + } + ], + "source": [ + "from pattern.en import wordnet\n", + "\n", + "a = wordnet.synsets('basement')[0]\n", + "b = wordnet.synsets('base')[0]\n", + "c = wordnet.synsets('basil')[0]\n", + "\n", + "t = path_similarity('basic','base')\n", + "print(t)\n", + "print( wordnet.similarity(a, a)) \n", + "print (wordnet.similarity(a, b))\n", + "print( wordnet.similarity(a, c)) " + ] + }, + { + "cell_type": "code", + "execution_count": 263, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "loudlier\n" + ] + } + ], + "source": [ + "from pattern.en import comparative, superlative,grade\n", + " \n", + "print (comparative('loudly'))\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " if univ_pos in (NOUN, 'NOUN', 'noun'):\n", + " univ_pos = 'noun'\n", + " elif univ_pos in (VERB, 'VERB', 'verb'):\n", + " univ_pos = 'verb'\n", + " elif univ_pos in (ADJ, 'ADJ', 'adj'):\n", + " univ_pos = 'adj'\n", + " elif univ_pos in (PUNCT, 'PUNCT', 'punct'):\n", + " univ_pos = 'punct'" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": {}, + "outputs": [ + { + "ename": "ImportError", + "evalue": "cannot import name 'Tagger'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mfrom\u001b[0m \u001b[0mspacy\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpipeline\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mTagger\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mtagger\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTagger\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnlp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvocab\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mdoc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnlp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mu\"I went to countryside with my family.\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprocessed\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtagger\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdoc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mImportError\u001b[0m: cannot import name 'Tagger'" + ] + } + ], + "source": [ + "from spacy.pipeline import Tagger\n", + "tagger = Tagger(nlp.vocab)\n", + "doc = nlp(u\"I went to countryside with my family.\")\n", + "processed = tagger(doc)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "I i\n", + "went go\n", + "to to\n", + "countryside countryside\n", + "with with\n", + "my my\n", + "family family\n", + ". .\n", + "hurrily hurrily\n" + ] + } + ], + "source": [ + "import spacy\n", + "\n", + "print(nlp(u''))\n", + "for tok in nlp(u'I went to countryside with my family.'):\n", + " print (tok, tok.lemma_)\n", + " \n", + "for tok in nlp(u'He tried his best to run hurrily'):\n", + " if tok.text == 'hurrily':\n", + " print (tok, tok.lemma_) " + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "better 0 \n", + ". 
0 \n" + ] + } + ], + "source": [ + "import spacy\n", + "\n", + "doc = nlp(u\"better.\")\n", + "\n", + "for token in doc:\n", + " print(token, token.lemma, token.lemma_)" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'ducks'}\n" + ] + } + ], + "source": [ + "from spacy.lemmatizer import Lemmatizer\n", + "from spacy.en import LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "lemmas = lemmatizer(u'ducks', u'NOUN')\n", + "print(lemmas)" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('summarise', 0.6666666666666666), ('summarises', 0.3333333333333333)]\n" + ] + } + ], + "source": [ + "from pattern.en import suggest\n", + "word = 'darkment'\n", + "print (suggest('summeries'))\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "这里包含中文字符.!?\n" + ] + } + ], + "source": [ + "def C_trans_to_E(string):\n", + " E_pun = u',.!?[]()<>\"\\''\n", + " C_pun = u',。!?【】()《》“‘'\n", + " table= {ord(f):ord(t) for f,t in zip(C_pun,E_pun)}\n", + " return string.translate(table)\n", + "\n", + "s1 = '这里包含中文字符。!?'\n", + "s2 = C_trans_to_E(s1)\n", + "print(s2)" + ] + }, + { + "cell_type": "code", + "execution_count": 111, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "beauty\n", + "beauti\n", + "beauti\n" + ] + } + ], + "source": [ + "from nltk.stem.lancaster import LancasterStemmer\n", + "from nltk.stem.porter import PorterStemmer\n", + "from nltk.stem import SnowballStemmer\n", + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "for stemmer in stemmers:\n", + " word = stemmer.stem(\"beautiful\")\n", + " print(word)\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "None\n", + "useful\n" + ] + } + ], + "source": [ + "from pattern.en import suggest\n", + "from nltk.stem.lancaster import LancasterStemmer\n", + "from nltk.stem.porter import PorterStemmer\n", + "from nltk.stem import SnowballStemmer\n", + "stemmers=[]\n", + "stemmers.append(LancasterStemmer()) \n", + "stemmers.append(SnowballStemmer(\"english\"))\n", + "stemmers.append(PorterStemmer())\n", + "\n", + "def adj_to_adv(word):\n", + " suggest_word = None\n", + " if(word == \"good\"):\n", + " return \"well\"\n", + " else:\n", + " word_stem = LancasterStemmer().stem(word)\n", + " #print(word_stem)\n", + " suggest_ = word + 'ly'\n", + " #print(suggest_)\n", + " suggest_list = suggest(suggest_)\n", + " #print(suggest_list)\n", + " for word_ in suggest_list:\n", + " stem_list = []\n", + " #print(word_[0])\n", + " for stemmer in stemmers:\n", + " stem_list.append(stemmer.stem(word_[0]))\n", + " #print(stem_list)\n", + " if word_stem in stem_list and word != word_[0]:\n", + " suggest_word = word_[0]\n", + " break\n", + " return suggest_word\n", + "\n", + "def adv_to_adj(word):\n", + " suggest_word = None\n", + " if(word == \"well\"):\n", + " return \"good\" \n", + " else:\n", + " word_stem = PorterStemmer().stem(word)\n", + " #print(\"词根\" + word_stem)\n", + " suggest_ = word[:-2]\n", + " 
#print(word)\n", + " suggest_list = suggest(suggest_)\n", + " #print(suggest_list)\n", + " for word_ in suggest_list:\n", + " stem_list = []\n", + " #print(word_[0])\n", + " for stemmer in stemmers:\n", + " stem_list.append(stemmer.stem(word_[0]))\n", + " #print(stem_list)\n", + " if word_stem in stem_list and word != word_[0]:\n", + " suggest_word = word_[0]\n", + " break\n", + " return suggest_word\n", + "\n", + "print(adj_to_adv(\"difficult\"))\n", + "print(adv_to_adj(\"usefully\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'comparative' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mlist_word\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlist_word2\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 54\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbuild_like_word_adj\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"angry\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 55\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbuild_like_word_adj2\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"angry\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbuild_like_word_adv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"however\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mbuild_like_word_adj\u001b[0;34m(word)\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mlemmas\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0mlist_word\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m \u001b[0mlist_word\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcomparative\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0mlist_word\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msuperlative\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0mword_adv\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0madj_to_adv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'comparative' is not defined" + ] + } + ], + "source": [ + "from spacy.lemmatizer import Lemmatizer\n", + "from spacy.en import LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES\n", + "from pattern.en import suggest\n", + "lemmatizer = Lemmatizer(LEMMA_INDEX, LEMMA_EXC, LEMMA_RULES)\n", + "#lemmas = lemmatizer(u'best', u'adj')\n", + "\n", + "def build_like_word_adj(word): #创建类似形容词列表\n", + " list_word = []\n", + " lemmas = 
lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " list_word.append(comparative(i))\n", + " list_word.append(superlative(i))\n", + " word_adv = adj_to_adv(i)\n", + " if word_adv != None:\n", + " list_word.append(word_adv)\n", + " return list_word\n", + "\n", + "def build_like_word_adv(word): #创建类似形容词列表\n", + " list_word = []\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " list_word.append(comparative(i))\n", + " list_word.append(superlative(i))\n", + " word_adj = adv_to_adj(i)\n", + " if word_adj != None:\n", + " list_word.append(word_adj)\n", + " return list_word\n", + "def build_like_word_adj2(word): #创建类似形容词列表\n", + " list_word = []\n", + " list_word2 = [] #把比较级最高级带more的放在这里\n", + " lemmas = lemmatizer(word, u'adj')\n", + " #print(lemmas)\n", + " for i in lemmas:\n", + " list_word.append(i)\n", + " word_er = comparative(i)\n", + " if \"more\" in word_er:\n", + " list_word2.append(word_er)\n", + " else:\n", + " list_word.append(word_er)\n", + " word_est = superlative(i)\n", + " if \"most\" in word_est:\n", + " list_word2.append(word_est)\n", + " else:\n", + " list_word.append(word_est)\n", + " word_adv = adj_to_adv(i)\n", + " if word_adv != None:\n", + " list_word.append(word_adv)\n", + " return list_word,list_word2\n", + "\n", + "print(build_like_word_adj(\"angry\"))\n", + "print(build_like_word_adj2(\"angry\"))\n", + "print(build_like_word_adv(\"however\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 287, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "19 xiaofang -13\n", + "18 xiaofang -13\n" + ] + } + ], + "source": [ + "name = 'Tim' #全局变量\n", + "ids = 130\n", + "def f1():\n", + " age = 18 #局部变量\n", + " print(age,name,ids)\n", + "\n", + " \n", + "def f2():\n", + " age=19 #局部变量\n", + " global name,ids\n", + " name = 'xiaofang'\n", + " ids = -13\n", + " print(age,name,ids)\n", + " f1()\n", + "\n", + "\n", + "f2()" + ] + }, + { + "cell_type": "code", + "execution_count": 302, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "03/12/2019 16:58:49 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt from cache at /home/xd/.pytorch_pretrained_bert/26bc1ad6c0ac742e9b52263248f6d0f00068293b33709fae12320c0e35ccfbbb.542ce4285a40d23a559526243235df47c5f75c197f04f37d1a0c124c32c9a084\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "\n", + "import numpy as np\n", + "import math\n", + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "from pylab import rcParams\n", + "\n", + "import torch\n", + "import torch.nn.functional as F\n", + "from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig\n", + "from examples.extract_features import *\n", + "\n", + "tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')#do_lower_case:在标记化时将文本转换为小写。默认= True\n" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'tokenizer' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in 
\u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mword\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mlist_word\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0mlist_word_id\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtokenizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvocab\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mword\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mKeyError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mword\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0;34m'是错误的key'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mNameError\u001b[0m: name 'tokenizer' is not defined" + ] + } + ], + "source": [ + "list_word_id = []\n", + "list_word = ['angry', 'angrier', 'angriest', 'angrily']\n", + "for word in list_word:\n", + " try:\n", + " list_word_id.append(tokenizer.vocab[word])\n", + " except KeyError:\n", + " print(word + '是错误的key')\n", + "print(list_word_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 304, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2]\n" + ] + } + ], + "source": [ + "a = 1\n", + "b = [2, 3]\n", + "\n", + "def func():\n", + " del b[1]\n", + "\n", + "func()\n", + "print(b)" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['The', 'man', 'is', 'a', 'Chinese', '.', 'He', 'is', \"n't\", 'a', 'bitch', '.']\n" + ] + } + ], + "source": [ + "tokenize = nltk.word_tokenize\n", + "text = tokenize(\"The man is a Chinese . He isn't a bitch.\")\n", + "print(text)" + ] + }, + { + "cell_type": "code", + "execution_count": 317, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{1: 'xiaofang', 13: 'xiaoheimao'}\n" + ] + } + ], + "source": [ + "dictionary = {}\n", + "dictionary.update({1:\"xiaofang\"})\n", + "dictionary.update({13:\"xiaoheimao\"})\n", + "\n", + "print(dictionary)" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "3、词干化\n", + "you are best. it is lemmatize test for spacy. I love these books\n", + "you -PRON- 757862\n", + "are be 536\n", + "best best 902\n", + ". . 453\n", + "it it 519\n", + "is is 513\n", + "lemmatize lemmatize 1138934\n", + "test test 1877\n", + "for for 531\n", + "spacy spacy 857539\n", + ". . 453\n", + "I -PRON- 757862\n", + "love love 949\n", + "these these 742\n", + "books book 1300\n" + ] + } + ], + "source": [ + "print(\"\\n3、词干化\")\n", + "test_doc = nlp(u\"you are best. it is lemmatize test for spacy. 
I love these books\")\n", + "print(test_doc)\n", + "for token in test_doc:\n", + " print(token, token.lemma_, token.lemma)" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "basic basic 0.9999999284398302\n", + "basic as 0.3952775975639635\n", + "basic base 0.44361194055627906\n", + "as basic 0.3952775975639635\n", + "as as 0.9999999975007859\n", + "as base 0.3801193503113012\n", + "base basic 0.44361194055627906\n", + "base as 0.3801193503113012\n", + "base base 0.9999999536640364\n" + ] + } + ], + "source": [ + "import spacy\n", + "import en_core_web_md\n", + "nlp = en_core_web_md.load() # make sure to use larger model!\n", + "tokens = nlp(u'basic as base')\n", + "\n", + "for token1 in tokens:\n", + " for token2 in tokens:\n", + " print(token1.text, token2.text, token1.similarity(token2))" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n", + "[('increasing', 1.0)]\n" + ] + } + ], + "source": [ + "import enchant\n", + "from pattern.en import suggest\n", + "d = enchant.Dict(\"en_US\")\n", + "\n", + "print(d.check(\"cream\"))\n", + "print(suggest(\"increasing\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('cream', 0.5384615384615384), ('crew', 0.4230769230769231), ('cret', 0.038461538461538464)]\n", + "pattern的suggest time cost 0.0010383129119873047 s\n", + "['rem', 'creme', 'cream', 'cram', 'chem', 'crew', 'c rem', 'Cree', 'Rem', 'crime', 'crimp']\n", + "enchant的suggest time cost 0.019581079483032227 s\n" + ] + } + ], + "source": [ + "from pattern.en import suggest\n", + "import time\n", + "import enchant\n", + "d = enchant.Dict(\"en_US\")\n", + "time_start=time.time()\n", + "print(suggest(\"crem\"))\n", + "time_end=time.time()\n", + "print('pattern的suggest time cost',time_end-time_start,'s')\n", + "\n", + "time_start=time.time()\n", + "print(d.suggest(\"crem\"))\n", + "time_end=time.time()\n", + "print('enchant的suggest time cost',time_end-time_start,'s')" + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('beauteous', 0.5), ('dishy', 0.5)]\n" + ] + } + ], + "source": [ + "from convert_pos import convert\n", + " \n", + "print(convert(\"beauty\", 'n', 'a'))" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[Synset('happy.a.01'), Synset('felicitous.s.02'), Synset('glad.s.02'), Synset('happy.s.04')]\n" + ] + } + ], + "source": [ + "word = 'happy'\n", + "from_pos = 'a'\n", + "synsets = wn.synsets(word, pos=from_pos)\n", + "print(synsets)" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['it', 'was', 'monday', 'morning', ',', 'and', 'the', 'writing', 'class', 'had', 'just', 'begin', '.', 'everyone', 'was', 'silent', ',', 'wait', 'to', 'see', 'who', 'would', 'be', 'called', 'upon', 'to', 'read', 'his', 'and', 'her', 'paragraph', 'aloud', '.', 'some', 'of', 'us', 'were', 'confident', 'and', 'eager', 'take', 'part', 'in', 'the', 'class', 'activity', ',', 'others', 'were', 'nervous', 'and', 'anxious', '.', 
'i', 'had', 'done', 'myself', 'homework', 'but', 'i', 'was', 'shy', '.', 'i', 'was', 'afraid', 'that', 'to', 'speak', 'in', 'front', 'of', 'a', 'larger', 'group', 'of', 'people', '.', 'at', 'that', 'moment', ',', 'i', 'remembered', 'that', 'my', 'father', 'once', 'said', ',', '``', 'the', 'classroom', 'is', 'a', 'place', 'for', 'learning', 'and', 'that', 'include', 'leaning', 'from', 'textbooks', ',', 'and', 'mistake', 'as', 'well', '.', \"''\", 'immediate', ',', 'i', 'raised', 'my', 'huuuand', '.']\n" + ] + } + ], + "source": [ + "text = nltk.word_tokenize('It was Monday morning, and the writing class had just begin. Everyone was silent, wait to see who would be called upon to read his and her paragraph aloud. Some of us were confident and eager take part in the class activity, others were nervous and anxious. I had done myself homework but I was shy. I was afraid that to speak in front of a larger group of people. At that moment, I remembered that my father once said, \"The classroom is a place for learning and that include leaning from textbooks, and mistake as well.\" Immediate, I raised my huuuand.'.lower())\n", + "print(text)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[NbConvertApp] Converting notebook likunlin_草稿.ipynb to python\n", + "[NbConvertApp] Writing 14963 bytes to likunlin_草稿.py\n" + ] + } + ], + "source": [ + "try:\n", + " !jupyter nbconvert --to python likunlin_草稿.ipynb\n", + "except:\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\"\n" + ] + } + ], + "source": [ + "def C_trans_to_E(string): #标点符号转换函数\n", + " E_pun = u',.!?[]()<>\"\\''\n", + " C_pun = u',。!?【】()《》“‘'\n", + " table= {ord(f):ord(t) for f,t in zip(C_pun,E_pun)}\n", + " return string.translate(table)\n", + "\n", + "print(C_trans_to_E(\"“\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "None\n" + ] + } + ], + "source": [ + "from nltk.corpus import wordnet as wn\n", + "\n", + "print(wn.morphy('taller', \"n\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [], + "source": [ + "def analyse_词性(token,tag):\n", + " if 'VB' in tag: #如果是动词的各种时态\n", + " tag0 = \"v\"\n", + " elif \"JJ\" in tag : #形容词\n", + " tag0 = \"a\"\n", + " elif \"RB\" in tag: #副词\n", + " tag0 = \"r\"\n", + " elif \"NN\" in tag: #名词\n", + " tag0 = \"n\"\n", + " else:\n", + " return tag\n", + " if wn.morphy(token, tag0)==None:\n", + " tag = nltk.pos_tag([token])[0][1]\n", + " return tag" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[('countryside', 'NN')]\n" + ] + } + ], + "source": [ + "tokens = ['I','went','to','countryside','.']\n", + "tokens = ['countryside']\n", + "print(nltk.pos_tag(tokens))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + 
"pygments_lexer": "ipython3", + "version": "3.6.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/lkl.tar b/lkl.tar new file mode 100644 index 00000000000000..310b50123edd39 Binary files /dev/null and b/lkl.tar differ diff --git a/mnist/processed/test.pt b/mnist/processed/test.pt new file mode 100644 index 00000000000000..eb8ca2281f459d Binary files /dev/null and b/mnist/processed/test.pt differ diff --git a/mnist/processed/training.pt b/mnist/processed/training.pt new file mode 100644 index 00000000000000..ccd5b1c99cdc24 Binary files /dev/null and b/mnist/processed/training.pt differ diff --git a/mnist/raw/t10k-images-idx3-ubyte b/mnist/raw/t10k-images-idx3-ubyte new file mode 100644 index 00000000000000..1170b2cae98de7 Binary files /dev/null and b/mnist/raw/t10k-images-idx3-ubyte differ diff --git a/mnist/raw/t10k-labels-idx1-ubyte b/mnist/raw/t10k-labels-idx1-ubyte new file mode 100644 index 00000000000000..d1c3a970612bbd Binary files /dev/null and b/mnist/raw/t10k-labels-idx1-ubyte differ diff --git a/mnist/raw/train-images-idx3-ubyte b/mnist/raw/train-images-idx3-ubyte new file mode 100644 index 00000000000000..bbce27659e0fc2 Binary files /dev/null and b/mnist/raw/train-images-idx3-ubyte differ diff --git a/mnist/raw/train-labels-idx1-ubyte b/mnist/raw/train-labels-idx1-ubyte new file mode 100644 index 00000000000000..d6b4c5db3b5206 Binary files /dev/null and b/mnist/raw/train-labels-idx1-ubyte differ diff --git a/pattern_develop b/pattern_develop new file mode 160000 index 00000000000000..53245196139c6e --- /dev/null +++ b/pattern_develop @@ -0,0 +1 @@ +Subproject commit 53245196139c6ef26dc9c34873dda8a16f236d23 diff --git a/probe_pretrained_model.py b/probe_pretrained_model.py new file mode 100644 index 00000000000000..be03d451adb729 --- /dev/null +++ b/probe_pretrained_model.py @@ -0,0 +1,274 @@ +import os + +import numpy as np +import math +import matplotlib +import matplotlib.pyplot as plt +from pylab import rcParams + +import torch +import torch.nn.functional as F +from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig +from examples.extract_features import * + +tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + +CONFIG_NAME = 'bert_config.json' +BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/' +config_file = os.path.join(BERT_DIR, CONFIG_NAME) +config = BertConfig.from_json_file(config_file) + +model = BertForPreTraining.from_pretrained(BERT_DIR) +model.eval() + +vis_attn_topk = 3 + +def has_chinese_label(labels): + labels = [label.split('->')[0].strip() for label in labels] + r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. 
/ (len(labels) - 1) + return 0 < r < 0.5 # r == 0 means empty query labels used in self attention + +def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'): + assert len(query_labels) == attn.size(0) + assert len(key_labels) == attn.size(1) + + ax1.set_xlim([-1, 1]) + ax1.set_xticks([]) + ax2 = ax1.twinx() + nlabels = max(len(key_labels), len(query_labels)) + pos = range(nlabels) + + if 'self' in attn_name and col < ncols - 1: + query_labels = ['' for _ in query_labels] + + for ax, labels in [(ax1, key_labels), (ax2, query_labels)]: + ax.set_yticks(pos) + if has_chinese_label(labels): + ax.set_yticklabels(labels, fontproperties=zhfont) + else: + ax.set_yticklabels(labels) + ax.set_ylim([nlabels - 1, 0]) + ax.tick_params(width=0, labelsize='xx-large') + + for spine in ax.spines.values(): + spine.set_visible(False) + +# mask, attn = filter_attn(attn) + for qi in range(attn.size(0)): +# if not mask[qi]: +# continue +# for ki in range(attn.size(1)): + for ki in attn[qi].topk(vis_attn_topk)[1]: + a = attn[qi, ki] + ax1.plot((-1, 1), (ki, qi), color, alpha=a) +# print(attn.mean(dim=0).topk(5)[0]) +# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy()) + +def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None): + hypo, nheads, labels_dict = result_tuple + key_labels, query_labels = labels_dict[attn_name] + if heads is None: + heads = range(nheads) + else: + nheads = len(heads) + + stride = 2 if attn_name == 'dec_enc_attns' else 1 + nlabels = max(len(key_labels), len(query_labels)) + rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0)) + + rows = nheads // ncols * stride + fig, axes = plt.subplots(rows, ncols) + + # for head in range(nheads): + for head_i, head in enumerate(heads): + row, col = head_i * stride // ncols, head_i * stride % ncols + ax1 = axes[row, col] + attn = hypo[attn_name][layer][head] + _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col) + if attn_name == 'dec_enc_attns': + col = col + 1 + axes[row, col].axis('off') # next subfig acts as blank place holder + # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20) + plt.show() + +ncols = 4 + +import re +def convert_text_to_examples(text): + examples = [] + unique_id = 0 + if True: + for line in text: + line = tokenization.convert_to_unicode(line) + line = line.strip() + text_a = None + text_b = None + m = re.match(r"^(.*) \|\|\| (.*)$", line) + if m is None: + text_a = line + else: + text_a = m.group(1) + text_b = m.group(2) + examples.append( + InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b)) + unique_id += 1 + return examples + +def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False): + features = [] + for (ex_index, example) in enumerate(examples): + tokens_a = tokenizer.tokenize(example.text_a) + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + tokens = [] + input_type_ids = [] + if append_special_tokens: + tokens.append("[CLS]") + input_type_ids.append(0) + for token in tokens_a: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(0) + if append_special_tokens: + tokens.append("[SEP]") + input_type_ids.append(0) + + if tokens_b: + for token in tokens_b: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(1) + if append_special_tokens: + tokens.append("[SEP]") + 
input_type_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + input_mask = [1] * len(input_ids) + + if ex_index < 5 and print_info: + logger.info("*** Example ***") + logger.info("unique_id: %s" % (example.unique_id)) + logger.info("tokens: %s" % " ".join([str(x) for x in tokens])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "input_type_ids: %s" % " ".join([str(x) for x in input_type_ids])) + + features.append( + InputFeatures( + unique_id=example.unique_id, + tokens=tokens, + input_ids=input_ids, + input_mask=input_mask, + input_type_ids=input_type_ids)) + return features + +def copy_and_mask_features(features): + import copy + masked_feature_copies = [] + for feature in features: + for masked_pos in range(len(feature.tokens)): + feature_copy = copy.deepcopy(feature) + feature_copy.input_ids[masked_pos] = tokenizer.vocab["[MASK]"] + masked_feature_copies.append(feature_copy) + return masked_feature_copies + +def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): + def print_pair(token, prob, end_str='', hit_mark=' '): + # token = token.replace('', '').replace('\n', '/n') + print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str) + + for i in range(len(tokens)): + if i >= firstk: + break + ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]] + prob_ = probs[i][ind_].item() + print_pair(tokens[i], prob_, end_str='\t') + values, indices = probs[i].topk(topk) + for j in range(topk): + ind, prob = indices[j].item(), values[j].item() + hit_mark = '*' if ind == ind_ else ' ' + print_pair(tokenizer.ids_to_tokens[ind], prob, hit_mark=hit_mark) + print() + +import colored +from colored import stylize + +def show_abnormals(tokens, probs, show_suggestions=False): + def gap2color(gap): + if gap <= 5: + return 'yellow' + elif gap <= 10: + return 'orange_1' + else: + return 'red_1' + + def print_token(token, suggestion, gap): + if gap == 0: + print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='') + else: + print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='') + if show_suggestions and gap > 5: + print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='') + else: + print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='') + # print('/' + suggestion, end=' ') + # print('%.2f' % gap, end=' ') + + avg_gap = 0. 
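+    # The loop below scores every real token (the first [CLS] and last [SEP] are skipped):
+    # prob_ is the probability the masked-LM head assigns to the token that was actually
+    # written, top_prob/top_ind is its single most likely prediction, and
+    # gap = log(top_prob) - log(prob_) measures how surprising the written token is.
+    # gap2color maps gap <= 5 / <= 10 / > 10 to yellow / orange / red highlighting, and for
+    # large gaps the top prediction is printed as a suggested replacement (when
+    # show_suggestions is set).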
+ for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP] + ind_ = tokenizer.vocab[tokens[i]] + prob_ = probs[i][ind_].item() + top_prob = probs[i].max().item() + top_ind = probs[i].argmax().item() + gap = math.log(top_prob) - math.log(prob_) + suggestion = tokenizer.ids_to_tokens[top_ind] + print_token(tokens[i], suggestion, gap) + avg_gap += gap + avg_gap /= (len(tokens) - 2) + print() + print(avg_gap) + +analyzed_cache = {} + +def analyze_text(text, show_suggestions=False, show_firstk_probs=20): + if text[0] in analyzed_cache: + features, mlm_probs = analyzed_cache[text[0]] + given_mask = "[MASK]" in features[0].tokens + else: + examples = convert_text_to_examples(text) + features = convert_examples_to_features(examples, tokenizer, print_info=False) + given_mask = "[MASK]" in features[0].tokens + if not given_mask: + features = copy_and_mask_features(features) + + input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) + input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) + + mlm_logits, _ = model(input_ids, input_type_ids) + mlm_probs = F.softmax(mlm_logits, dim=-1) + + if not given_mask: + seq_len, _, vocab_size = mlm_probs.size() + reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size) + for i in range(seq_len): + reduced_mlm_probs[0, i] = mlm_probs[i, i] + mlm_probs = reduced_mlm_probs + + analyzed_cache[text[0]] = (features, mlm_probs) + + show_lm_probs(features[0].tokens, None, mlm_probs[0], firstk=show_firstk_probs) + if not given_mask: + show_abnormals(features[0].tokens, mlm_probs[0], show_suggestions=show_suggestions) + +text = ["Who was Jim Henson? Jim Henson was a puppeteer."] +text = ["I went to school by bus. I was very tired."] +text = ["Last week I went to the theatre. I had a very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention.In the end, I couldn't bear it. I turned round again. 'I can't hear a word!' I said angrily. 'It's none of your business,' the young man said rudely. 'This is a private conversation!'"] +# text = ["Last week I went to the theatre. I had a very good seat. The play was very interesting. But I didn't enjoy it. A young man and a young woman were sitting behind me. They were talking loudly. I got very angry. I couldn't hear a word. I turned round. I looked at the man angrily. They didn't pay any attention."] +# text = ["After the outbreak of the disease, the Ministry of Agriculture and rural areas immediately sent a supervision team to the local. Local Emergency Response Mechanism has been activated in accordance with the requirements, to take blockade, culling, harmless treatment, disinfection and other treatment measures to all disease and culling of pigs for harmless treatment. At the same time, all live pigs and their products are prohibited from transferring out of the blockade area, and live pigs are not allowed to be transported into the blockade area. 
At present, all the above measures have been implemented."] + +# analyze_text(text) diff --git a/pytorch_pretrained_bert/__init__.py b/pytorch_pretrained_bert/__init__.py index 7850fa5555e5a4..0ef8263748150b 100644 --- a/pytorch_pretrained_bert/__init__.py +++ b/pytorch_pretrained_bert/__init__.py @@ -1,6 +1,8 @@ +__version__ = "0.4.0" from .tokenization import BertTokenizer, BasicTokenizer, WordpieceTokenizer from .modeling import (BertConfig, BertModel, BertForPreTraining, BertForMaskedLM, BertForNextSentencePrediction, - BertForSequenceClassification, BertForQuestionAnswering) + BertForSequenceClassification, BertForMultipleChoice, + BertForTokenClassification, BertForQuestionAnswering) from .optimization import BertAdam from .file_utils import PYTORCH_PRETRAINED_BERT_CACHE diff --git a/pytorch_pretrained_bert/__main__.py b/pytorch_pretrained_bert/__main__.py index 73f1909b43a264..79ad8429323221 100644 --- a/pytorch_pretrained_bert/__main__.py +++ b/pytorch_pretrained_bert/__main__.py @@ -1,5 +1,5 @@ # coding: utf8 -if __name__ == '__main__': +def main(): import sys try: from .convert_tf_checkpoint_to_pytorch import convert_tf_checkpoint_to_pytorch @@ -17,3 +17,6 @@ TF_CONFIG = sys.argv.pop() TF_CHECKPOINT = sys.argv.pop() convert_tf_checkpoint_to_pytorch(TF_CHECKPOINT, TF_CONFIG, PYTORCH_DUMP_OUTPUT) + +if __name__ == '__main__': + main() diff --git a/pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py b/pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py index 20fdd8c0d6e856..1ff6c073e32909 100755 --- a/pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py +++ b/pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py @@ -50,7 +50,7 @@ def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, bert_config_file, pytor name = name.split('/') # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model - if name[-1] in ["adam_v", "adam_m"]: + if any(n in ["adam_v", "adam_m", "global_step"] for n in name): print("Skipping {}".format("/".join(name))) continue pointer = model @@ -59,9 +59,9 @@ def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, bert_config_file, pytor l = re.split(r'_(\d+)', m_name) else: l = [m_name] - if l[0] == 'kernel': + if l[0] == 'kernel' or l[0] == 'gamma': pointer = getattr(pointer, 'weight') - elif l[0] == 'output_bias': + elif l[0] == 'output_bias' or l[0] == 'beta': pointer = getattr(pointer, 'bias') elif l[0] == 'output_weights': pointer = getattr(pointer, 'weight') diff --git a/pytorch_pretrained_bert/file_utils.py b/pytorch_pretrained_bert/file_utils.py index f734b7e22b114b..43fa8ca87e20ee 100644 --- a/pytorch_pretrained_bert/file_utils.py +++ b/pytorch_pretrained_bert/file_utils.py @@ -45,13 +45,15 @@ def url_to_filename(url: str, etag: str = None) -> str: return filename -def filename_to_url(filename: str, cache_dir: str = None) -> Tuple[str, str]: +def filename_to_url(filename: str, cache_dir: Union[str, Path] = None) -> Tuple[str, str]: """ Return the url and etag (which may be ``None``) stored for `filename`. Raise ``FileNotFoundError`` if `filename` or its stored metadata do not exist. 
""" if cache_dir is None: cache_dir = PYTORCH_PRETRAINED_BERT_CACHE + if isinstance(cache_dir, Path): + cache_dir = str(cache_dir) cache_path = os.path.join(cache_dir, filename) if not os.path.exists(cache_path): @@ -69,7 +71,7 @@ def filename_to_url(filename: str, cache_dir: str = None) -> Tuple[str, str]: return url, etag -def cached_path(url_or_filename: Union[str, Path], cache_dir: str = None) -> str: +def cached_path(url_or_filename: Union[str, Path], cache_dir: Union[str, Path] = None) -> str: """ Given something that might be a URL (or might be a local path), determine which. If it's a URL, download the file and cache it, and @@ -80,6 +82,8 @@ def cached_path(url_or_filename: Union[str, Path], cache_dir: str = None) -> str cache_dir = PYTORCH_PRETRAINED_BERT_CACHE if isinstance(url_or_filename, Path): url_or_filename = str(url_or_filename) + if isinstance(cache_dir, Path): + cache_dir = str(cache_dir) parsed = urlparse(url_or_filename) @@ -158,13 +162,15 @@ def http_get(url: str, temp_file: IO) -> None: progress.close() -def get_from_cache(url: str, cache_dir: str = None) -> str: +def get_from_cache(url: str, cache_dir: Union[str, Path] = None) -> str: """ Given a URL, look for the corresponding dataset in the local cache. If it's not there, download it. Then return the path to the cached file. """ if cache_dir is None: cache_dir = PYTORCH_PRETRAINED_BERT_CACHE + if isinstance(cache_dir, Path): + cache_dir = str(cache_dir) os.makedirs(cache_dir, exist_ok=True) @@ -221,7 +227,7 @@ def read_set_from_file(filename: str) -> Set[str]: Expected file format is one item per line. ''' collection = set() - with open(filename, 'r') as file_: + with open(filename, 'r', encoding='utf-8') as file_: for line in file_: collection.add(line.rstrip()) return collection diff --git a/pytorch_pretrained_bert/modeling.py b/pytorch_pretrained_bert/modeling.py index 2d6dfa531dc5fd..0826531badaaca 100644 --- a/pytorch_pretrained_bert/modeling.py +++ b/pytorch_pretrained_bert/modeling.py @@ -1,5 +1,6 @@ # coding=utf-8 # Copyright 2018 The Google AI Language Team Authors and The HugginFace Inc. team. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -33,16 +34,15 @@ from .file_utils import cached_path -logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', - datefmt = '%m/%d/%Y %H:%M:%S', - level = logging.INFO) logger = logging.getLogger(__name__) PRETRAINED_MODEL_ARCHIVE_MAP = { 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased.tar.gz", 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased.tar.gz", 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased.tar.gz", - 'bert-base-multilingual': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual.tar.gz", + 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased.tar.gz", + 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased.tar.gz", + 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased.tar.gz", 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese.tar.gz", } CONFIG_NAME = 'bert_config.json' @@ -103,7 +103,7 @@ def __init__(self, initializing all weight matrices. """ if isinstance(vocab_size_or_config_json_file, str): - with open(vocab_size_or_config_json_file, "r") as reader: + with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader: json_config = json.loads(reader.read()) for key, value in json_config.items(): self.__dict__[key] = value @@ -134,7 +134,7 @@ def from_dict(cls, json_object): @classmethod def from_json_file(cls, json_file): """Constructs a `BertConfig` from a json file of parameters.""" - with open(json_file, "r") as reader: + with open(json_file, "r", encoding='utf-8') as reader: text = reader.read() return cls.from_dict(json.loads(text)) @@ -150,22 +150,24 @@ def to_json_string(self): """Serializes this instance to a JSON string.""" return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" - -class BertLayerNorm(nn.Module): - def __init__(self, config, variance_epsilon=1e-12): - """Construct a layernorm module in the TF style (epsilon inside the square root). - """ - super(BertLayerNorm, self).__init__() - self.gamma = nn.Parameter(torch.ones(config.hidden_size)) - self.beta = nn.Parameter(torch.zeros(config.hidden_size)) - self.variance_epsilon = variance_epsilon - - def forward(self, x): - u = x.mean(-1, keepdim=True) - s = (x - u).pow(2).mean(-1, keepdim=True) - x = (x - u) / torch.sqrt(s + self.variance_epsilon) - return self.gamma * x + self.beta - +try: + from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm +except ImportError: + print("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.") + class BertLayerNorm(nn.Module): + def __init__(self, hidden_size, eps=1e-12): + """Construct a layernorm module in the TF style (epsilon inside the square root). + """ + super(BertLayerNorm, self).__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.bias = nn.Parameter(torch.zeros(hidden_size)) + self.variance_epsilon = eps + + def forward(self, x): + u = x.mean(-1, keepdim=True) + s = (x - u).pow(2).mean(-1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.variance_epsilon) + return self.weight * x + self.bias class BertEmbeddings(nn.Module): """Construct the embeddings from word, position and token_type embeddings. 
@@ -178,7 +180,7 @@ def __init__(self, config): # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load # any TensorFlow checkpoint file - self.LayerNorm = BertLayerNorm(config) + self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, input_ids, token_type_ids=None): @@ -253,7 +255,7 @@ class BertSelfOutput(nn.Module): def __init__(self, config): super(BertSelfOutput, self).__init__() self.dense = nn.Linear(config.hidden_size, config.hidden_size) - self.LayerNorm = BertLayerNorm(config) + self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states, input_tensor): @@ -292,7 +294,7 @@ class BertOutput(nn.Module): def __init__(self, config): super(BertOutput, self).__init__() self.dense = nn.Linear(config.intermediate_size, config.hidden_size) - self.LayerNorm = BertLayerNorm(config) + self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12) self.dropout = nn.Dropout(config.hidden_dropout_prob) def forward(self, hidden_states, input_tensor): @@ -320,7 +322,7 @@ class BertEncoder(nn.Module): def __init__(self, config): super(BertEncoder, self).__init__() layer = BertLayer(config) - self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)]) + self.layer = nn.ModuleList([copy.deepcopy(layer) for _ in range(config.num_hidden_layers)]) def forward(self, hidden_states, attention_mask, output_all_encoded_layers=True): all_encoder_layers = [] @@ -354,7 +356,7 @@ def __init__(self, config): self.dense = nn.Linear(config.hidden_size, config.hidden_size) self.transform_act_fn = ACT2FN[config.hidden_act] \ if isinstance(config.hidden_act, str) else config.hidden_act - self.LayerNorm = BertLayerNorm(config) + self.LayerNorm = BertLayerNorm(config.hidden_size, eps=1e-12) def forward(self, hidden_states): hidden_states = self.dense(hidden_states) @@ -437,28 +439,32 @@ def init_bert_weights(self, module): # cf https://github.com/pytorch/pytorch/pull/5617 module.weight.data.normal_(mean=0.0, std=self.config.initializer_range) elif isinstance(module, BertLayerNorm): - module.beta.data.normal_(mean=0.0, std=self.config.initializer_range) - module.gamma.data.normal_(mean=0.0, std=self.config.initializer_range) + module.bias.data.zero_() + module.weight.data.fill_(1.0) if isinstance(module, nn.Linear) and module.bias is not None: module.bias.data.zero_() @classmethod - def from_pretrained(cls, pretrained_model_name, cache_dir=None, *inputs, **kwargs): + def from_pretrained(cls, pretrained_model_name, state_dict=None, cache_dir=None, *inputs, **kwargs): """ - Instantiate a PreTrainedBertModel from a pre-trained model file. + Instantiate a PreTrainedBertModel from a pre-trained model file or a pytorch state dict. Download and cache the pre-trained model file if needed. - + Params: pretrained_model_name: either: - a str with the name of a pre-trained model to load selected in the list of: . `bert-base-uncased` . `bert-large-uncased` . `bert-base-cased` - . `bert-base-multilingual` + . `bert-large-cased` + . `bert-base-multilingual-uncased` + . `bert-base-multilingual-cased` . `bert-base-chinese` - a path or url to a pretrained model archive containing: . `bert_config.json` a configuration file for the model . `pytorch_model.bin` a PyTorch dump of a BertForPreTraining instance + cache_dir: an optional path to a folder in which the pre-trained models will be cached. 
+ state_dict: an optional state dictionnary (collections.OrderedDict object) to use instead of Google pre-trained models *inputs, **kwargs: additional input for the specific Bert class (ex: num_labels for BertForSequenceClassification) """ @@ -476,7 +482,7 @@ def from_pretrained(cls, pretrained_model_name, cache_dir=None, *inputs, **kwarg "associated to this path or url.".format( pretrained_model_name, ', '.join(PRETRAINED_MODEL_ARCHIVE_MAP.keys()), - pretrained_model_name)) + archive_file)) return None if resolved_archive_file == archive_file: logger.info("loading archive file {}".format(archive_file)) @@ -497,11 +503,26 @@ def from_pretrained(cls, pretrained_model_name, cache_dir=None, *inputs, **kwarg # Load config config_file = os.path.join(serialization_dir, CONFIG_NAME) config = BertConfig.from_json_file(config_file) - logger.info("Model config {}".format(config)) + # logger.info("Model config {}".format(config)) # XD # Instantiate model. model = cls(config, *inputs, **kwargs) - weights_path = os.path.join(serialization_dir, WEIGHTS_NAME) - state_dict = torch.load(weights_path) + if state_dict is None: + weights_path = os.path.join(serialization_dir, WEIGHTS_NAME) + state_dict = torch.load(weights_path) + + old_keys = [] + new_keys = [] + for key in state_dict.keys(): + new_key = None + if 'gamma' in key: + new_key = key.replace('gamma', 'weight') + if 'beta' in key: + new_key = key.replace('beta', 'bias') + if new_key: + old_keys.append(key) + new_keys.append(new_key) + for old_key, new_key in zip(old_keys, new_keys): + state_dict[new_key] = state_dict.pop(old_key) missing_keys = [] unexpected_keys = [] @@ -557,7 +578,7 @@ class BertModel(PreTrainedBertModel): of each attention block (i.e. 12 full sequences for BERT-base, 24 for BERT-large), each encoded-hidden-state is a torch.FloatTensor of size [batch_size, sequence_length, hidden_size], - `output_all_encoded_layers=False`: outputs only the full sequence of hidden-states corresponding - to the last attention block, + to the last attention block of shape [batch_size, sequence_length, hidden_size], `pooled_output`: a torch.FloatTensor of size [batch_size, hidden_size] which is the output of a classifier pretrained on top of the hidden state associated to the first character of the input (`CLF`) to train on the Next-Sentence task (see BERT's paper). @@ -567,10 +588,10 @@ class BertModel(PreTrainedBertModel): # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = modeling.BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = modeling.BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) model = modeling.BertModel(config=config) all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask) @@ -648,18 +669,18 @@ class BertForPreTraining(PreTrainedBertModel): sentence classification loss. if `masked_lm_labels` or `next_sentence_label` is `None`: Outputs a tuple comprising - - the masked language modeling logits, and - - the next sentence classification logits. + - the masked language modeling logits of shape [batch_size, sequence_length, vocab_size], and + - the next sentence classification logits of shape [batch_size, 2]. 
Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) model = BertForPreTraining(config) masked_lm_logits_scores, seq_relationship_logits = model(input_ids, token_type_ids, input_mask) @@ -678,7 +699,7 @@ def forward(self, input_ids, token_type_ids=None, attention_mask=None, masked_lm if masked_lm_labels is not None and next_sentence_label is not None: loss_fct = CrossEntropyLoss(ignore_index=-1) - masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels(-1)) + masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), masked_lm_labels.view(-1)) next_sentence_loss = loss_fct(seq_relationship_score.view(-1, 2), next_sentence_label.view(-1)) total_loss = masked_lm_loss + next_sentence_loss return total_loss @@ -709,20 +730,20 @@ class BertForMaskedLM(PreTrainedBertModel): is only computed for the labels set in [0, ..., vocab_size] Outputs: - if `masked_lm_labels` is `None`: + if `masked_lm_labels` is not `None`: Outputs the masked language modeling loss. if `masked_lm_labels` is `None`: - Outputs the masked language modeling logits. + Outputs the masked language modeling logits of shape [batch_size, sequence_length, vocab_size]. Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) model = BertForMaskedLM(config) masked_lm_logits_scores = model(input_ids, token_type_ids, input_mask) @@ -774,7 +795,7 @@ class BertForNextSentencePrediction(PreTrainedBertModel): Outputs the total_loss which is the sum of the masked language modeling loss and the next sentence classification loss. if `next_sentence_label` is `None`: - Outputs the next sentence classification logits. + Outputs the next sentence classification logits of shape [batch_size, 2]. 
Example usage: ```python @@ -783,8 +804,8 @@ class BertForNextSentencePrediction(PreTrainedBertModel): input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) model = BertForNextSentencePrediction(config) seq_relationship_logits = model(input_ids, token_type_ids, input_mask) @@ -836,17 +857,17 @@ class BertForSequenceClassification(PreTrainedBertModel): if `labels` is not `None`: Outputs the CrossEntropy classification loss of the output with the labels. if `labels` is `None`: - Outputs the classification logits. + Outputs the classification logits of shape [batch_size, num_labels]. Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) num_labels = 2 @@ -870,7 +891,142 @@ def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=No if labels is not None: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) - return loss, logits + return loss + else: + return logits + + +class BertForMultipleChoice(PreTrainedBertModel): + """BERT model for multiple choice tasks. + This module is composed of the BERT model with a linear layer on top of + the pooled output. + + Params: + `config`: a BertConfig class instance with the configuration to build a new model. + `num_choices`: the number of classes for the classifier. Default = 2. + + Inputs: + `input_ids`: a torch.LongTensor of shape [batch_size, num_choices, sequence_length] + with the word token indices in the vocabulary(see the tokens preprocessing logic in the scripts + `extract_features.py`, `run_classifier.py` and `run_squad.py`) + `token_type_ids`: an optional torch.LongTensor of shape [batch_size, num_choices, sequence_length] + with the token types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` + and type 1 corresponds to a `sentence B` token (see BERT paper for more details). + `attention_mask`: an optional torch.LongTensor of shape [batch_size, num_choices, sequence_length] with indices + selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max + input sequence length in the current batch. It's the mask that we typically use for attention when + a batch has varying length sentences. + `labels`: labels for the classification output: torch.LongTensor of shape [batch_size] + with indices selected in [0, ..., num_choices]. + + Outputs: + if `labels` is not `None`: + Outputs the CrossEntropy classification loss of the output with the labels. + if `labels` is `None`: + Outputs the classification logits of shape [batch_size, num_labels]. 
+ + Example usage: + ```python + # Already been converted into WordPiece token ids + input_ids = torch.LongTensor([[[31, 51, 99], [15, 5, 0]], [[12, 16, 42], [14, 28, 57]]]) + input_mask = torch.LongTensor([[[1, 1, 1], [1, 1, 0]],[[1,1,0], [1, 0, 0]]]) + token_type_ids = torch.LongTensor([[[0, 0, 1], [0, 1, 0]],[[0, 1, 1], [0, 0, 1]]]) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) + + num_choices = 2 + + model = BertForMultipleChoice(config, num_choices) + logits = model(input_ids, token_type_ids, input_mask) + ``` + """ + def __init__(self, config, num_choices=2): + super(BertForMultipleChoice, self).__init__(config) + self.num_choices = num_choices + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + self.classifier = nn.Linear(config.hidden_size, 1) + self.apply(self.init_bert_weights) + + def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None): + flat_input_ids = input_ids.view(-1, input_ids.size(-1)) + flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) + flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) + _, pooled_output = self.bert(flat_input_ids, flat_token_type_ids, flat_attention_mask, output_all_encoded_layers=False) + pooled_output = self.dropout(pooled_output) + logits = self.classifier(pooled_output) + reshaped_logits = logits.view(-1, self.num_choices) + + if labels is not None: + loss_fct = CrossEntropyLoss() + loss = loss_fct(reshaped_logits, labels) + return loss + else: + return reshaped_logits + + +class BertForTokenClassification(PreTrainedBertModel): + """BERT model for token-level classification. + This module is composed of the BERT model with a linear layer on top of + the full hidden state of the last layer. + + Params: + `config`: a BertConfig class instance with the configuration to build a new model. + `num_labels`: the number of classes for the classifier. Default = 2. + + Inputs: + `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] + with the word token indices in the vocabulary(see the tokens preprocessing logic in the scripts + `extract_features.py`, `run_classifier.py` and `run_squad.py`) + `token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token + types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to + a `sentence B` token (see BERT paper for more details). + `attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices + selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max + input sequence length in the current batch. It's the mask that we typically use for attention when + a batch has varying length sentences. + `labels`: labels for the classification output: torch.LongTensor of shape [batch_size] + with indices selected in [0, ..., num_labels]. + + Outputs: + if `labels` is not `None`: + Outputs the CrossEntropy classification loss of the output with the labels. + if `labels` is `None`: + Outputs the classification logits of shape [batch_size, sequence_length, num_labels]. 
+ + Example usage: + ```python + # Already been converted into WordPiece token ids + input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) + input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) + + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) + + num_labels = 2 + + model = BertForTokenClassification(config, num_labels) + logits = model(input_ids, token_type_ids, input_mask) + ``` + """ + def __init__(self, config, num_labels=2): + super(BertForTokenClassification, self).__init__(config) + self.num_labels = num_labels + self.bert = BertModel(config) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + self.classifier = nn.Linear(config.hidden_size, num_labels) + self.apply(self.init_bert_weights) + + def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None): + sequence_output, _ = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False) + sequence_output = self.dropout(sequence_output) + logits = self.classifier(sequence_output) + + if labels is not None: + loss_fct = CrossEntropyLoss() + loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) + return loss else: return logits @@ -881,15 +1037,7 @@ class BertForQuestionAnswering(PreTrainedBertModel): the sequence output that computes start_logits and end_logits Params: - `config`: either - - a BertConfig class instance with the configuration to build a new model, or - - a str with the name of a pre-trained model to load selected in the list of: - . `bert-base-uncased` - . `bert-large-uncased` - . `bert-base-cased` - . `bert-base-multilingual` - . `bert-base-chinese` - The pre-trained model will be downloaded and cached if needed. + `config`: a BertConfig class instance with the configuration to build a new model. Inputs: `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] @@ -914,17 +1062,17 @@ class BertForQuestionAnswering(PreTrainedBertModel): Outputs the total_loss which is the sum of the CrossEntropy loss for the start and end token positions. if `start_positions` or `end_positions` is `None`: Outputs a tuple of start_logits, end_logits which are the logits respectively for the start and end - position tokens. + position tokens of shape [batch_size, sequence_length]. 
Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) - token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]]) + token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) - config = BertConfig(vocab_size=32000, hidden_size=512, - num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024) + config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, + num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) model = BertForQuestionAnswering(config) start_logits, end_logits = model(input_ids, token_type_ids, input_mask) diff --git a/pytorch_pretrained_bert/optimization.py b/pytorch_pretrained_bert/optimization.py index 4266a8f83ba699..f3d1de0d37b8b6 100644 --- a/pytorch_pretrained_bert/optimization.py +++ b/pytorch_pretrained_bert/optimization.py @@ -17,6 +17,7 @@ import math import torch from torch.optim import Optimizer +from torch.optim.optimizer import required from torch.nn.utils import clip_grad_norm_ def warmup_cosine(x, warmup=0.002): @@ -52,13 +53,13 @@ class BertAdam(Optimizer): b1: Adams b1. Default: 0.9 b2: Adams b2. Default: 0.999 e: Adams epsilon. Default: 1e-6 - weight_decay_rate: Weight decay. Default: 0.01 + weight_decay: Weight decay. Default: 0.01 max_grad_norm: Maximum norm for the gradients (-1 means no clipping). Default: 1.0 """ - def __init__(self, params, lr, warmup=-1, t_total=-1, schedule='warmup_linear', - b1=0.9, b2=0.999, e=1e-6, weight_decay_rate=0.01, + def __init__(self, params, lr=required, warmup=-1, t_total=-1, schedule='warmup_linear', + b1=0.9, b2=0.999, e=1e-6, weight_decay=0.01, max_grad_norm=1.0): - if not lr >= 0.0: + if lr is not required and lr < 0.0: raise ValueError("Invalid learning rate: {} - should be >= 0.0".format(lr)) if schedule not in SCHEDULES: raise ValueError("Invalid schedule parameter: {}".format(schedule)) @@ -71,7 +72,7 @@ def __init__(self, params, lr, warmup=-1, t_total=-1, schedule='warmup_linear', if not e >= 0.0: raise ValueError("Invalid epsilon value: {} - should be >= 0.0".format(e)) defaults = dict(lr=lr, schedule=schedule, warmup=warmup, t_total=t_total, - b1=b1, b2=b2, e=e, weight_decay_rate=weight_decay_rate, + b1=b1, b2=b2, e=e, weight_decay=weight_decay, max_grad_norm=max_grad_norm) super(BertAdam, self).__init__(params, defaults) @@ -139,8 +140,8 @@ def step(self, closure=None): # Instead we want to decay the weights in a manner that doesn't interact # with the m/v parameters. This is equivalent to adding the square # of the weights to the loss with plain (non-momentum) SGD. 
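+                # In other words, weight decay here is decoupled (AdamW-style): the term
+                # weight_decay * p.data is added to the update directly, after the Adam
+                # moment estimates, rather than being folded into the gradient.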
- if group['weight_decay_rate'] > 0.0: - update += group['weight_decay_rate'] * p.data + if group['weight_decay'] > 0.0: + update += group['weight_decay'] * p.data if group['t_total'] != -1: schedule_fct = SCHEDULES[group['schedule']] diff --git a/pytorch_pretrained_bert/tokenization.py b/pytorch_pretrained_bert/tokenization.py index c37a7e3b9ee32b..595eb8fdaa92a8 100644 --- a/pytorch_pretrained_bert/tokenization.py +++ b/pytorch_pretrained_bert/tokenization.py @@ -25,18 +25,27 @@ from .file_utils import cached_path -logging.basicConfig(format = '%(asctime)s - %(levelname)s - %(name)s - %(message)s', - datefmt = '%m/%d/%Y %H:%M:%S', - level = logging.INFO) logger = logging.getLogger(__name__) PRETRAINED_VOCAB_ARCHIVE_MAP = { 'bert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-uncased-vocab.txt", 'bert-large-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-uncased-vocab.txt", 'bert-base-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-cased-vocab.txt", - 'bert-base-multilingual': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-vocab.txt", + 'bert-large-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-large-cased-vocab.txt", + 'bert-base-multilingual-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-uncased-vocab.txt", + 'bert-base-multilingual-cased': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-multilingual-cased-vocab.txt", 'bert-base-chinese': "https://s3.amazonaws.com/models.huggingface.co/bert/bert-base-chinese-vocab.txt", } +PRETRAINED_VOCAB_POSITIONAL_EMBEDDINGS_SIZE_MAP = { + 'bert-base-uncased': 512, + 'bert-large-uncased': 512, + 'bert-base-cased': 512, + 'bert-large-cased': 512, + 'bert-base-multilingual-uncased': 512, + 'bert-base-multilingual-cased': 512, + 'bert-base-chinese': 512, +} +VOCAB_NAME = 'vocab.txt' def load_vocab(vocab_file): @@ -65,7 +74,9 @@ def whitespace_tokenize(text): class BertTokenizer(object): """Runs end-to-end tokenization: punctuation splitting + wordpiece""" - def __init__(self, vocab_file, do_lower_case=True): + + def __init__(self, vocab_file, do_lower_case=True, max_len=None, + never_split=("[UNK]", "[SEP]", "[PAD]", "[CLS]", "[MASK]")): if not os.path.isfile(vocab_file): raise ValueError( "Can't find a vocabulary file at path '{}'. To load the vocabulary from a Google pretrained " @@ -73,8 +84,10 @@ def __init__(self, vocab_file, do_lower_case=True): self.vocab = load_vocab(vocab_file) self.ids_to_tokens = collections.OrderedDict( [(ids, tok) for tok, ids in self.vocab.items()]) - self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) + self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case, + never_split=never_split) self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab) + self.max_len = max_len if max_len is not None else int(1e12) def tokenize(self, text): split_tokens = [] @@ -88,6 +101,12 @@ def convert_tokens_to_ids(self, tokens): ids = [] for token in tokens: ids.append(self.vocab[token]) + if len(ids) > self.max_len: + raise ValueError( + "Token indices sequence length is longer than the specified maximum " + " sequence length for this BERT model ({} > {}). 
Running this" + " sequence through BERT will result in indexing errors".format(len(ids), self.max_len) + ) return ids def convert_ids_to_tokens(self, ids): @@ -98,7 +117,7 @@ def convert_ids_to_tokens(self, ids): return tokens @classmethod - def from_pretrained(cls, pretrained_model_name, do_lower_case=True): + def from_pretrained(cls, pretrained_model_name, cache_dir=None, *inputs, **kwargs): """ Instantiate a PreTrainedBertModel from a pre-trained model file. Download and cache the pre-trained model file if needed. @@ -107,16 +126,11 @@ def from_pretrained(cls, pretrained_model_name, do_lower_case=True): vocab_file = PRETRAINED_VOCAB_ARCHIVE_MAP[pretrained_model_name] else: vocab_file = pretrained_model_name + if os.path.isdir(vocab_file): + vocab_file = os.path.join(vocab_file, VOCAB_NAME) # redirect to the cache, if necessary try: - resolved_vocab_file = cached_path(vocab_file) - if resolved_vocab_file == vocab_file: - logger.info("loading vocabulary file {}".format(vocab_file)) - else: - logger.info("loading vocabulary file {} from cache at {}".format( - vocab_file, resolved_vocab_file)) - # Instantiate tokenizer. - tokenizer = cls(resolved_vocab_file, do_lower_case) + resolved_vocab_file = cached_path(vocab_file, cache_dir=cache_dir) except FileNotFoundError: logger.error( "Model name '{}' was not found in model name list ({}). " @@ -124,21 +138,36 @@ def from_pretrained(cls, pretrained_model_name, do_lower_case=True): "associated to this path or url.".format( pretrained_model_name, ', '.join(PRETRAINED_VOCAB_ARCHIVE_MAP.keys()), - pretrained_model_name)) - tokenizer = None + vocab_file)) + return None + if resolved_vocab_file == vocab_file: + logger.info("loading vocabulary file {}".format(vocab_file)) + else: + logger.info("loading vocabulary file {} from cache at {}".format( + vocab_file, resolved_vocab_file)) + if pretrained_model_name in PRETRAINED_VOCAB_POSITIONAL_EMBEDDINGS_SIZE_MAP: + # if we're using a pretrained model, ensure the tokenizer wont index sequences longer + # than the number of positional embeddings + max_len = PRETRAINED_VOCAB_POSITIONAL_EMBEDDINGS_SIZE_MAP[pretrained_model_name] + kwargs['max_len'] = min(kwargs.get('max_len', int(1e12)), max_len) + # Instantiate tokenizer. + tokenizer = cls(resolved_vocab_file, *inputs, **kwargs) return tokenizer class BasicTokenizer(object): """Runs basic tokenization (punctuation splitting, lower casing, etc.).""" - def __init__(self, do_lower_case=True): + def __init__(self, + do_lower_case=True, + never_split=("[UNK]", "[SEP]", "[PAD]", "[CLS]", "[MASK]")): """Constructs a BasicTokenizer. Args: do_lower_case: Whether to lower case the input. 
""" self.do_lower_case = do_lower_case + self.never_split = never_split def tokenize(self, text): """Tokenizes a piece of text.""" @@ -153,7 +182,7 @@ def tokenize(self, text): orig_tokens = whitespace_tokenize(text) split_tokens = [] for token in orig_tokens: - if self.do_lower_case: + if self.do_lower_case and token not in self.never_split: token = token.lower() token = self._run_strip_accents(token) split_tokens.extend(self._run_split_on_punc(token)) @@ -174,6 +203,8 @@ def _run_strip_accents(self, text): def _run_split_on_punc(self, text): """Splits punctuation on a piece of text.""" + if text in self.never_split: + return [text] chars = list(text) i = 0 start_new_word = True @@ -191,7 +222,7 @@ def _run_split_on_punc(self, text): i += 1 return ["".join(x) for x in output] - + def _tokenize_chinese_chars(self, text): """Adds whitespace around any CJK character.""" output = [] @@ -216,17 +247,17 @@ def _is_chinese_char(self, cp): # space-separated words, so they are not treated specially and handled # like the all of the other languages. if ((cp >= 0x4E00 and cp <= 0x9FFF) or # - (cp >= 0x3400 and cp <= 0x4DBF) or # - (cp >= 0x20000 and cp <= 0x2A6DF) or # - (cp >= 0x2A700 and cp <= 0x2B73F) or # - (cp >= 0x2B740 and cp <= 0x2B81F) or # - (cp >= 0x2B820 and cp <= 0x2CEAF) or - (cp >= 0xF900 and cp <= 0xFAFF) or # - (cp >= 0x2F800 and cp <= 0x2FA1F)): # + (cp >= 0x3400 and cp <= 0x4DBF) or # + (cp >= 0x20000 and cp <= 0x2A6DF) or # + (cp >= 0x2A700 and cp <= 0x2B73F) or # + (cp >= 0x2B740 and cp <= 0x2B81F) or # + (cp >= 0x2B820 and cp <= 0x2CEAF) or + (cp >= 0xF900 and cp <= 0xFAFF) or # + (cp >= 0x2F800 and cp <= 0x2FA1F)): # return True - + return False - + def _clean_text(self, text): """Performs invalid character removal and whitespace cleanup on text.""" output = [] @@ -261,7 +292,7 @@ def tokenize(self, text): Args: text: A single token or whitespace separated tokens. This should have - already been passed through `BasicTokenizer. + already been passed through `BasicTokenizer`. Returns: A list of wordpiece tokens. diff --git a/requirements.txt b/requirements.txt index e9a3640a9b3a63..f37f11cc540bb1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ -# This installs Pytorch for CUDA 8 only. If you are using a newer version, -# please visit http://pytorch.org/ and install the relevant version. -torch>=0.4.1,<0.5.0 +# PyTorch +torch>=0.4.1 # progress bars in model download and training scripts tqdm # Accessing files from S3 directly. 
diff --git a/run_child_finetuning.py b/run_child_finetuning.py new file mode 100644 index 00000000000000..3fd4a66f3f9de4 --- /dev/null +++ b/run_child_finetuning.py @@ -0,0 +1,531 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import os +import logging +import argparse +from tqdm import tqdm, trange +import math + +import numpy as np +import torch +from torch.utils.data import DataLoader, RandomSampler, SequentialSampler +from torch.utils.data.distributed import DistributedSampler + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from pytorch_pretrained_bert.modeling import BertForPreTraining, BertForMaskedLM +from pytorch_pretrained_bert.optimization import BertAdam + +# from child_generator import make_sentences +# from child_frames import frames + +from torch.utils.data import Dataset, TensorDataset +import random + +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s', + datefmt='%m/%d/%Y %H:%M:%S', + level=logging.INFO) +logger = logging.getLogger(__name__) + + +def warmup_linear(x, warmup=0.002): + if x < warmup: + return x/warmup + return 1.0 - x + + +def rejoin_masked_tokens(tokens): + out = [] + while len(tokens) > 0: + token = tokens.pop(0) + if token not in ['[', ']']: + out.append(token) + else: + assert token == '[' + next_token = tokens.pop(0) # the masked word + next_next_token = tokens.pop(0) # "]" symbol + out.append(token + next_token + next_next_token) + return out + + +class CHILDDataset(Dataset): + def __init__(self, tokenizer, all_lines, one_sent=False, seq_len=None, dev_percent=0.2): + self.tokenizer = tokenizer + self.one_sent = one_sent + self.seq_len = seq_len + + self.all_lines = all_lines +# self.all_lines = [] +# for frame in frames: +# self.all_lines += make_sentences(**frame) + + random.shuffle(self.all_lines) + + self.examples = [] + cur_id = 0 + for line in self.all_lines: + t1, t2, is_next_label = self.split_sent(line) + + tokens_a = self.tokenizer.tokenize(t1) + tokens_a = rejoin_masked_tokens(tokens_a) + + if t2 is None: + tokens_b = None + else: + tokens_b = self.tokenizer.tokenize(t2) + tokens_b = rejoin_masked_tokens(tokens_b) + + example = InputExample(guid=cur_id, tokens_a=tokens_a, tokens_b=tokens_b, is_next=is_next_label) + self.examples.append(example) + cur_id += 1 + + if self.seq_len is None: + # self.seq_len = max([len(example.tokens_a) + 3 for example in self.examples]) + # if example.tokens_b is not None: + # self.seq_len += len(example.tokens_b) + self.seq_len = max([len(example.tokens_a) + len(example.tokens_b) + 3 + if example.tokens_b is not None else len(example.tokens_a) + 2 + for example in self.examples]) + + self.features = [convert_example_to_features(example, self.seq_len, self.tokenizer) for example in self.examples] + + self.n_examples = len(self.all_lines) + self.n_dev = int(self.n_examples * dev_percent) + self.n_train = self.n_examples - self.n_dev + + def get_train_examples(self): + return self.examples[:self.n_train] + + def get_dev_examples(self): + return self.examples[self.n_train:] + + def get_train_features(self): + return self.features[:self.n_train] + + def get_dev_features(self): + return self.features[self.n_train:] + + def build_dataset(self, features): + all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) + all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long) + all_segment_ids = torch.tensor([f.segment_ids for f in features], dtype=torch.long) +
all_lm_label_ids = torch.tensor([f.lm_label_ids for f in features], dtype=torch.long) + all_is_next = torch.tensor([f.is_next for f in features], dtype=torch.long) + dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_lm_label_ids, all_is_next) + return dataset + + def __len__(self): + return len(self.all_lines) + + def split_sent(self, line): + label = 0 + if "|||" in line: + t1, t2 = [t.strip() for t in line.split("|||")] + assert len(t1) > 0 and len(t2) > 0, "%d %d" % (len(t1), len(t2)) + if self.one_sent: + t1 = t1 + " " + t2 + t2 = None + else: + # assert self.one_sent + t1, t2 = line.strip(), None + return t1, t2, label + + +class InputExample(object): + def __init__(self, guid, tokens_a, tokens_b=None, is_next=None, lm_labels=None): + self.guid = guid + self.tokens_a = tokens_a + self.tokens_b = tokens_b + self.is_next = is_next # nextSentence + self.lm_labels = lm_labels # masked words for language model + + +class InputFeatures(object): + def __init__(self, input_ids, input_mask, segment_ids, is_next, lm_label_ids): + self.input_ids = input_ids + self.input_mask = input_mask + self.segment_ids = segment_ids + self.is_next = is_next + self.lm_label_ids = lm_label_ids + + +def mask_word(tokens, tokenizer): + output_label = [] + + for i, token in enumerate(tokens): + if token.startswith("[") and token.endswith("]"): # masked word + token = token[1:-1] + tokens[i] = "[MASK]" + output_label.append(tokenizer.vocab[token]) + else: + output_label.append(-1) + + return tokens, output_label + + +def convert_example_to_features(example, max_seq_length, tokenizer): + tokens_a = example.tokens_a + tokens_b = example.tokens_b + + t1_random, t1_label = mask_word(tokens_a, tokenizer) + lm_label_ids = [-1] + t1_label + [-1] + + tokens = [] + segment_ids = [] + tokens.append("[CLS]") + segment_ids.append(0) + for token in tokens_a: + tokens.append(token) + segment_ids.append(0) + tokens.append("[SEP]") + segment_ids.append(0) + + if tokens_b is not None and len(tokens_b) > 0: + t2_random, t2_label = mask_word(tokens_b, tokenizer) + lm_label_ids += (t2_label + [-1]) + + for token in tokens_b: + tokens.append(token) + segment_ids.append(1) + tokens.append("[SEP]") + segment_ids.append(1) + + input_ids = tokenizer.convert_tokens_to_ids(tokens) + + # The mask has 1 for real tokens and 0 for padding tokens. Only real + # tokens are attended to. + input_mask = [1] * len(input_ids) + + # Zero-pad up to the sequence length. 
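+    # Padding positions below get input_mask 0 and lm_label_id -1; since the masked-LM loss
+    # in modeling.py uses CrossEntropyLoss(ignore_index=-1), only the bracket-masked words
+    # (labelled by mask_word above) contribute to the fine-tuning loss.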
+ while len(input_ids) < max_seq_length: + input_ids.append(0) + input_mask.append(0) + segment_ids.append(0) + lm_label_ids.append(-1) + + assert len(input_ids) == max_seq_length, '%d != %d' % (len(input_ids), max_seq_length) + assert len(input_mask) == max_seq_length + assert len(segment_ids) == max_seq_length + assert len(lm_label_ids) == max_seq_length + + if example.guid < -5: + logger.info("*** Example ***") + logger.info("guid: %s" % (example.guid)) + logger.info("tokens: %s" % " ".join( + [str(x) for x in tokens])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) + logger.info("LM label: %s " % (lm_label_ids)) + logger.info("Is next sentence label: %s " % (example.is_next)) + + features = InputFeatures(input_ids=input_ids, + input_mask=input_mask, + segment_ids=segment_ids, + lm_label_ids=lm_label_ids, + is_next=example.is_next) + return features + + +def main(): + parser = argparse.ArgumentParser() + + ## Required parameters + parser.add_argument("--output_dir", + default=None, + type=str, + required=True, + help="The output directory where the model checkpoints will be written.") + + ## Other parameters + parser.add_argument("--max_seq_length", + default=128, + type=int, + help="The maximum total input sequence length after WordPiece tokenization. \n" + "Sequences longer than this will be truncated, and sequences shorter \n" + "than this will be padded.") + parser.add_argument("--do_train", + action='store_true', + help="Whether to run training.") + parser.add_argument("--do_eval", + action='store_true', + help="Whether to run eval on the dev set.") + parser.add_argument("--train_batch_size", + default=32, + type=int, + help="Total batch size for training.") + parser.add_argument("--eval_batch_size", + default=32, + type=int, + help="Total batch size for eval.") + parser.add_argument("--learning_rate", + default=3e-5, + type=float, + help="The initial learning rate for Adam.") + parser.add_argument("--num_train_epochs", + default=3.0, + type=float, + help="Total number of training epochs to perform.") + parser.add_argument("--warmup_proportion", + default=0.1, + type=float, + help="Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10%% of training.") + parser.add_argument("--no_cuda", + action='store_true', + help="Whether not to use CUDA when available") + parser.add_argument("--do_lower_case", + action='store_true', + help="Whether to lower case the input text. True for uncased models, False for cased models.") + parser.add_argument("--local_rank", + type=int, + default=-1, + help="local_rank for distributed training on gpus") + parser.add_argument('--seed', + type=int, + default=42, + help="random seed for initialization") + parser.add_argument('--gradient_accumulation_steps', + type=int, + default=1, + help="Number of updates steps to accumualte before performing a backward/update pass.") + parser.add_argument('--fp16', + action='store_true', + help="Whether to use 16-bit float precision instead of 32-bit") + parser.add_argument('--loss_scale', + type = float, default = 0, + help = "Loss scaling to improve fp16 numeric stability. 
Only used when fp16 set to True.\n"
+                             "0 (default value): dynamic loss scaling.\n"
+                             "Positive power of 2: static loss scaling value.\n")
+    parser.add_argument("--train_file",
+                        default=None,
+                        type=str,
+                        required=True,
+                        help="Input text file with one training sentence per line "
+                             "(words to be masked are wrapped in square brackets, e.g. \"the [cat] sat\").")
+
+    args = parser.parse_args()
+
+    if args.local_rank == -1 or args.no_cuda:
+        device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu")
+        n_gpu = torch.cuda.device_count()
+    else:
+        torch.cuda.set_device(args.local_rank)
+        device = torch.device("cuda", args.local_rank)
+        n_gpu = 1
+        # Initializes the distributed backend which will take care of synchronizing nodes/GPUs
+        torch.distributed.init_process_group(backend='nccl')
+    logger.info("device: {} n_gpu: {}, distributed training: {}, 16-bits training: {}".format(
+        device, n_gpu, bool(args.local_rank != -1), args.fp16))
+
+    if args.gradient_accumulation_steps < 1:
+        raise ValueError("Invalid gradient_accumulation_steps parameter: {}, should be >= 1".format(
+                            args.gradient_accumulation_steps))
+
+    args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps)
+
+    random.seed(args.seed)
+    np.random.seed(args.seed)
+    torch.manual_seed(args.seed)
+    if n_gpu > 0:
+        torch.cuda.manual_seed_all(args.seed)
+
+    if not args.do_train and not args.do_eval:
+        raise ValueError("At least one of `do_train` or `do_eval` must be True.")
+
+    if os.path.exists(args.output_dir) and os.listdir(args.output_dir):
+        raise ValueError("Output directory ({}) already exists and is not empty.".format(args.output_dir))
+    os.makedirs(args.output_dir, exist_ok=True)
+
+    BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/'
+    tokenizer = BertTokenizer.from_pretrained(os.path.join(BERT_DIR, 'vocab.txt'), do_lower_case=args.do_lower_case)
+
+    #train_examples = None
+    num_train_steps = None
+    print("Loading Train Dataset", args.train_file)
+    # Build one dataset and split it into train/dev features, mirroring train_child.py.
+    with open(args.train_file, "r", encoding="utf-8") as f:
+        all_lines = [line for line in f.read().splitlines() if line.strip()]
+    child_dataset = CHILDDataset(tokenizer, all_lines)
+    if args.do_train:
+        train_features = child_dataset.get_train_features()
+        num_train_steps = int(
+            len(train_features) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs)
+
+    # Prepare model
+    model = BertForMaskedLM.from_pretrained(BERT_DIR)
+    if args.fp16:
+        model.half()
+    model.to(device)
+    if args.local_rank != -1:
+        try:
+            from apex.parallel import DistributedDataParallel as DDP
+        except ImportError:
+            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.")
+        model = DDP(model)
+    elif n_gpu > 1:
+        model = torch.nn.DataParallel(model)
+
+    # Prepare optimizer
+    param_optimizer = list(model.named_parameters())
+    no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
+    optimizer_grouped_parameters = [
+        {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
+        {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
+        ]
+    if args.fp16:
+        try:
+            from apex.optimizers import FP16_Optimizer
+            from apex.optimizers import FusedAdam
+        except ImportError:
+            raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use distributed and fp16 training.")
+
+        optimizer = FusedAdam(optimizer_grouped_parameters,
+                              lr=args.learning_rate,
+                              bias_correction=False,
+                              max_grad_norm=1.0)
+        if args.loss_scale == 0:
+            optimizer = FP16_Optimizer(optimizer, dynamic_loss_scale=True)
+        else:
+            optimizer = FP16_Optimizer(optimizer, static_loss_scale=args.loss_scale)
+
+    else:
+        optimizer = BertAdam(optimizer_grouped_parameters,
+                             lr=args.learning_rate,
+                             warmup=args.warmup_proportion,
+                             t_total=num_train_steps)
+
+    global_step = 0
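+    # Note added for clarity (a worked sketch, not used by the script): the manual
+    # learning-rate update inside the training loop below follows warmup_linear(),
+    # i.e. a linear ramp over the first warmup_proportion of the steps and a linear
+    # decay afterwards. Assuming, for illustration only, num_train_steps = 1000 and
+    # warmup_proportion = 0.1:
+    #   global_step    0 -> lr = args.learning_rate * 0.0
+    #   global_step   50 -> lr = args.learning_rate * 0.5   (halfway through warmup)
+    #   global_step  100 -> lr = args.learning_rate * 0.9   (warmup over: 1 - 100/1000)
+    #   global_step  600 -> lr = args.learning_rate * 0.4   (1 - 600/1000)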
+    if args.do_train:
+        logger.info("***** Running training *****")
+        logger.info("  Num examples = %d", len(train_features))
+        logger.info("  Batch size = %d", args.train_batch_size)
+        logger.info("  Num steps = %d", num_train_steps)
+
+        all_input_ids = torch.tensor([f.input_ids for f in train_features], dtype=torch.long)
+        all_input_mask = torch.tensor([f.input_mask for f in train_features], dtype=torch.long)
+        all_segment_ids = torch.tensor([f.segment_ids for f in train_features], dtype=torch.long)
+        all_lm_label_ids = torch.tensor([f.lm_label_ids for f in train_features], dtype=torch.long)
+        all_is_next = torch.tensor([f.is_next for f in train_features], dtype=torch.long)
+        train_dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_lm_label_ids, all_is_next)
+
+        if args.local_rank == -1:
+            train_sampler = RandomSampler(train_dataset)
+        else:
+            #TODO: check if this works with current data generator from disk that relies on file.__next__
+            # (it doesn't return item back by index)
+            train_sampler = DistributedSampler(train_dataset)
+        train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size)
+
+    if args.do_eval:
+        eval_features = child_dataset.get_dev_features()
+        all_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long)
+        all_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long)
+        all_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long)
+        all_lm_label_ids = torch.tensor([f.lm_label_ids for f in eval_features], dtype=torch.long)
+        all_is_next = torch.tensor([f.is_next for f in eval_features], dtype=torch.long)
+        eval_dataset = TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_lm_label_ids, all_is_next)
+
+        eval_sampler = SequentialSampler(eval_dataset)
+        eval_dataloader = DataLoader(eval_dataset, sampler=eval_sampler, batch_size=args.eval_batch_size)
+
+        logger.info("Epoch 0")
+        logger.info("Evaluating on train set...")
+        validate(model, train_dataset, device)
+        logger.info("Evaluating on valid set...")
+        validate(model, eval_dataset, device)
+
+    for epoch in trange(int(args.num_train_epochs), desc="Epoch"):
+        model.train()
+        tr_loss = 0
+        nb_tr_examples, nb_tr_steps = 0, 0
+        for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")):
+            batch = tuple(t.to(device) for t in batch)
+            input_ids, input_mask, segment_ids, lm_label_ids, is_next = batch
+            loss = model(input_ids, segment_ids, input_mask, lm_label_ids)
+            if n_gpu > 1:
+                loss = loss.mean() # mean() to average on multi-gpu.
+            if args.gradient_accumulation_steps > 1:
+                loss = loss / args.gradient_accumulation_steps
+            if args.fp16:
+                optimizer.backward(loss)
+            else:
+                loss.backward()
+            tr_loss += loss.item()
+            nb_tr_examples += input_ids.size(0)
+            nb_tr_steps += 1
+            if (step + 1) % args.gradient_accumulation_steps == 0:
+                # modify learning rate with special warm up BERT uses
+                lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_steps, args.warmup_proportion)
+                for param_group in optimizer.param_groups:
+                    param_group['lr'] = lr_this_step
+                optimizer.step()
+                optimizer.zero_grad()
+                global_step += 1
+
+        if args.do_eval:
+            logger.info("Epoch %d" % (epoch + 1))
+            logger.info("Evaluating on train set...")
+            validate(model, train_dataset, device)
+            logger.info("Evaluating on valid set...")
+            validate(model, eval_dataset, device)
+
+    # Save a trained model
+    logger.info("** ** * Saving fine - tuned model ** ** * ")
+    model_to_save = model.module if hasattr(model, 'module') else model  # Only save the model itself
+    output_model_file = os.path.join(args.output_dir, "pytorch_model.bin")
+    if args.do_train:
+        torch.save(model_to_save.state_dict(), output_model_file)
+
+
+def validate(model, dataset, device, batch_size=128, randomized=False):
+    model.eval()
+    eval_loss, eval_accuracy = 0, 0
+    nb_eval_steps, nb_eval_examples = 0, 0
+
+#     for input_ids, input_mask, segment_ids, label_ids, is_next in tqdm(eval_dataloader, desc="Evaluating"):
+    for i, batch_idx in enumerate(get_batch_index(len(dataset), batch_size, randomized=randomized)):
+        batch = tuple(t[batch_idx] for t in dataset.tensors)
+        batch = tuple(t.to(device) for t in batch)
+        input_ids, input_mask, segment_ids, label_ids, is_next = batch
+        input_ids = input_ids.to(device)
+        input_mask = input_mask.to(device)
+        segment_ids = segment_ids.to(device)
+        label_ids = label_ids.to(device)
+
+        with torch.no_grad():
+            tmp_eval_loss = model(input_ids, segment_ids, input_mask, label_ids)
+            logits = model(input_ids, segment_ids, input_mask)
+
+        logits = logits.detach().cpu().numpy()
+        label_ids = label_ids.to('cpu').numpy()
+        tmp_eval_accuracy = accuracy(logits, label_ids)
+
+        eval_loss += tmp_eval_loss.mean().item()
+        eval_accuracy += tmp_eval_accuracy
+
+        nb_eval_examples += input_ids.size(0)
+        nb_eval_steps += 1
+
+    eval_loss = eval_loss / nb_eval_steps
+    eval_accuracy = eval_accuracy / nb_eval_examples
+#     loss = tr_loss/nb_tr_steps if args.do_train else None
+    result = {'eval_loss': eval_loss,
+              'eval_accuracy': eval_accuracy,}
+#               'global_step': global_step,
+#               'loss': loss}
+
+    logger.info("***** Eval results *****")
+    for key in sorted(result.keys()):
+        logger.info("  %s = %s", key, str(result[key]))
+
+
+def get_batch_index(dataset_size, batch_size, randomized=False):
+    import math
+    idx_list = list(range(dataset_size))
+    if randomized:
+        random.shuffle(idx_list)
+    n_batches = math.ceil(len(idx_list) / batch_size)
+    return [idx_list[i * batch_size: (i + 1) * batch_size] for i in range(n_batches)]
+
+
+def accuracy(out, labels):
+    outputs = np.argmax(out, axis=-1)
+#     return int(np.all((outputs == labels)[labels != -1]))
+    return int(np.sum((outputs == labels)[labels != -1]))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/score.py b/score.py
new file mode 100644
index 00000000000000..f1c3224c0b9d4a
--- /dev/null
+++ b/score.py
@@ -0,0 +1,92 @@
+from collections import OrderedDict
+import re
+import json
+import os
+from pprint import pprint
+
+
+def score():
+    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'WSC_child_problem.json')
+    with
open(path, 'r') as f: + data_l = json.load(f) + f.close() + + result = [] + s_order = ['sentence', 'answer1', 'answer0', 'correct_answer', 'adjacent_ref', 'predict_answer', 'score'] + data_order = ['index', 'sentences'] + for data in data_l: + if data['sentences'] != []: + for i in range(len(data['sentences'])): + s = data['sentences'][i] + score = 0 + if s['predict_answer'] != []: + predict_answer = s['predict_answer'][0] + if any(answer.lower() == predict_answer[0] for answer in s['correct_answer']): + score = 1 + s['score'] = score + s = OrderedDict(sorted(s.items(), key=lambda i:s_order.index(i[0]))) + data['sentences'][i] = s + data = OrderedDict(sorted(data.items(), key=lambda i:data_order.index(i[0]))) + result.append(data) + + print('Save the score in WSC_child_problem.json\n') + with open(path, 'w') as f: + json.dump(result, f, indent=4, separators=(',', ': '), ensure_ascii=False) + f.close() + + total_score = 0 + total_valid_problems = 0 + l = {} + for r in result: + for s in r['sentences']: + if 'score' in s: + total_valid_problems += 1 + score = s['score'] + total_score += score + if r['index'] not in l.keys(): + l[r['index']] = [0, 1] + else: + l[r['index']][1] += 1 + if score == 1: + l[r['index']][0] += 1 + print('Correct problems:') + pprint(l) + print() + + print('Score each valid problems:') + description = ' Total valid problems: {0}\n Correct answers: {1}\n Accuracy: {2}' + print(description.format(total_valid_problems, total_score, float(total_score/total_valid_problems))) + + print() + result_dict = {} + for r in result: + for s in r['sentences']: + if 'score' in s: + index = r['index'] + if index < 252: + if index % 2 == 1: + index -= 1 + elif index in [252, 253, 254]: + index = 252 + else: + if index % 2 == 0: + index -= 1 + if index in result_dict.keys(): + result_dict[index].append(s) + else: + result_dict[index] = [s] + + total_score = 0 + for key in result_dict.keys(): + score = 1 + for s in result_dict[key]: + if s['score'] == 0: + score = 0 + total_score += score + print('Score each valid problem groups:') + description = ' Total valid problems: {0}\n Correct answers: {1}\n Accuracy: {2}' + print(description.format(len(result_dict), total_score, float(total_score/len(result_dict)))) + + +if __name__ == '__main__': + score() diff --git a/setup.py b/setup.py index 9b2a678832005e..e9b5c077c4914e 100644 --- a/setup.py +++ b/setup.py @@ -1,12 +1,47 @@ +""" +Simple check list from AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py + +To create the package for pypi. + +1. Change the version in __init__.py and setup.py. + +2. Commit these changes with the message: "Release: VERSION" + +3. Add a tag in git to mark the release: "git tag VERSION -m'Adds tag VERSION for pypi' " + Push the tag to git: git push --tags origin master + +4. Build both the sources and the wheel. Do not change anything in setup.py between + creating the wheel and the source distribution (obviously). + + For the wheel, run: "python setup.py bdist_wheel" in the top level allennlp directory. + (this will build a wheel for the python version you use to build it - make sure you use python 3.x). + + For the sources, run: "python setup.py sdist" + You should now have a /dist directory with both .whl and .tar.gz source versions of allennlp. + +5. Check that everything looks correct by uploading the package to the pypi test server: + + twine upload dist/* -r pypitest + (pypi suggest using twine as other methods upload files via plaintext.) 
+ + Check that you can install it in a virtualenv by running: + pip install -i https://testpypi.python.org/pypi allennlp + +6. Upload the final version to actual pypi: + twine upload dist/* -r pypi + +7. Copy the release notes from RELEASE.md to the tag in github once everything is looking hunky-dory. + +""" from setuptools import find_packages, setup setup( name="pytorch_pretrained_bert", - version="0.2.0", + version="0.4.0", author="Thomas Wolf, Victor Sanh, Tim Rault, Google AI Language Team Authors", author_email="thomas@huggingface.co", description="PyTorch version of Google AI BERT model with script to load Google pre-trained models", - long_description=open("README.md", "r").read(), + long_description=open("README.md", "r", encoding='utf-8').read(), long_description_content_type="text/markdown", keywords='BERT NLP deep learning google', license='Apache', @@ -18,7 +53,11 @@ 'boto3', 'requests', 'tqdm'], - scripts=["bin/pytorch_pretrained_bert"], + entry_points={ + 'console_scripts': [ + "pytorch_pretrained_bert=pytorch_pretrained_bert.__main__:main" + ] + }, python_requires='>=3.5.0', tests_require=['pytest'], classifiers=[ diff --git a/test_WSC_child_problem.py b/test_WSC_child_problem.py new file mode 100644 index 00000000000000..7c731feb81712b --- /dev/null +++ b/test_WSC_child_problem.py @@ -0,0 +1,345 @@ +from IPython.core.interactiveshell import InteractiveShell +InteractiveShell.ast_node_interactivity = 'all' + +import os +import json + +import numpy as np +import math +import matplotlib +import matplotlib.pyplot as plt +from pylab import rcParams + +import torch +import torch.nn.functional as F +from pytorch_pretrained_bert import tokenization, BertTokenizer, BertModel, BertForMaskedLM, BertForPreTraining, BertConfig +from examples.extract_features import * + +tokenizer = BertTokenizer.from_pretrained('bert-base-uncased') + +CONFIG_NAME = 'bert_config.json' +BERT_DIR = '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/' +config_file = os.path.join(BERT_DIR, CONFIG_NAME) +config = BertConfig.from_json_file(config_file) +model = BertForPreTraining.from_pretrained(BERT_DIR) +model.eval() +class Args: + def __init__(self): + pass + +args = Args() +args.no_cuda = False + +device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") +model.to(device) + +vis_attn_topk = 3 + + +def has_chinese_label(labels): + labels = [label.split('->')[0].strip() for label in labels] + r = sum([len(label) > 1 for label in labels if label not in ['BOS', 'EOS']]) * 1. 
/ (len(labels) - 1) + return 0 < r < 0.5 # r == 0 means empty query labels used in self attention + +def _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col, color='b'): + assert len(query_labels) == attn.size(0) + assert len(key_labels) == attn.size(1) + + ax1.set_xlim([-1, 1]) + ax1.set_xticks([]) + ax2 = ax1.twinx() + nlabels = max(len(key_labels), len(query_labels)) + pos = range(nlabels) + + if 'self' in attn_name and col < ncols - 1: + query_labels = ['' for _ in query_labels] + + for ax, labels in [(ax1, key_labels), (ax2, query_labels)]: + ax.set_yticks(pos) + if has_chinese_label(labels): + ax.set_yticklabels(labels, fontproperties=zhfont) + else: + ax.set_yticklabels(labels) + ax.set_ylim([nlabels - 1, 0]) + ax.tick_params(width=0, labelsize='xx-large') + + for spine in ax.spines.values(): + spine.set_visible(False) + +# mask, attn = filter_attn(attn) + for qi in range(attn.size(0)): +# if not mask[qi]: +# continue +# for ki in range(attn.size(1)): + for ki in attn[qi].topk(vis_attn_topk)[1]: + a = attn[qi, ki] + ax1.plot((-1, 1), (ki, qi), color, alpha=a) +# print(attn.mean(dim=0).topk(5)[0]) +# ax1.barh(pos, attn.mean(dim=0).data.cpu().numpy()) + +def plot_layer_attn(result_tuple, attn_name='dec_self_attns', layer=0, heads=None): + hypo, nheads, labels_dict = result_tuple + key_labels, query_labels = labels_dict[attn_name] + if heads is None: + heads = range(nheads) + else: + nheads = len(heads) + + stride = 2 if attn_name == 'dec_enc_attns' else 1 + nlabels = max(len(key_labels), len(query_labels)) + rcParams['figure.figsize'] = 20, int(round(nlabels * stride * nheads / 8 * 1.0)) + + rows = nheads // ncols * stride + fig, axes = plt.subplots(rows, ncols) + + # for head in range(nheads): + for head_i, head in enumerate(heads): + row, col = head_i * stride // ncols, head_i * stride % ncols + ax1 = axes[row, col] + attn = hypo[attn_name][layer][head] + _plot_attn(ax1, attn_name, attn, key_labels, query_labels, col) + if attn_name == 'dec_enc_attns': + col = col + 1 + axes[row, col].axis('off') # next subfig acts as blank place holder + # plt.suptitle('%s with %d heads, Layer %d' % (attn_name, nheads, layer), fontsize=20) + plt.show() + +ncols = 4 +import re +def convert_text_to_examples(text): + examples = [] + unique_id = 0 + if True: + for line in text: + line = line.strip() + text_a = None + text_b = None + m = re.match(r"^(.*) \|\|\| (.*)$", line) + if m is None: + text_a = line + else: + text_a = m.group(1) + text_b = m.group(2) + examples.append( + InputExample(unique_id=unique_id, text_a=text_a, text_b=text_b)) + unique_id += 1 + return examples + +def convert_examples_to_features(examples, tokenizer, append_special_tokens=True, replace_mask=True, print_info=False): + features = [] + for (ex_index, example) in enumerate(examples): + tokens_a = tokenizer.tokenize(example.text_a) + tokens_b = None + if example.text_b: + tokens_b = tokenizer.tokenize(example.text_b) + + tokens = [] + input_type_ids = [] + if append_special_tokens: + tokens.append("[CLS]") + input_type_ids.append(0) + for token in tokens_a: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(0) + if append_special_tokens: + tokens.append("[SEP]") + input_type_ids.append(0) + + if tokens_b: + for token in tokens_b: + if replace_mask and token == '_': # XD + token = "[MASK]" + tokens.append(token) + input_type_ids.append(1) + if append_special_tokens: + tokens.append("[SEP]") + input_type_ids.append(1) + + input_ids = 
tokenizer.convert_tokens_to_ids(tokens) + input_mask = [1] * len(input_ids) + + if ex_index < 5 and print_info: + logger.info("*** Example ***") + logger.info("unique_id: %s" % (example.unique_id)) + logger.info("tokens: %s" % " ".join([str(x) for x in tokens])) + logger.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) + logger.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) + logger.info( + "input_type_ids: %s" % " ".join([str(x) for x in input_type_ids])) + + features.append( + InputFeatures( + unique_id=example.unique_id, + tokens=tokens, + input_ids=input_ids, + input_mask=input_mask, + input_type_ids=input_type_ids)) + return features + +def copy_and_mask_features(features): + import copy + masked_feature_copies = [] + for feature in features: + for masked_pos in range(len(feature.tokens)): + feature_copy = copy.deepcopy(feature) + feature_copy.input_ids[masked_pos] = tokenizer.vocab["[MASK]"] + masked_feature_copies.append(feature_copy) + return masked_feature_copies + +def show_lm_probs(tokens, input_ids, probs, topk=5, firstk=20): + def print_pair(token, prob, end_str='', hit_mark=' '): + if i < firstk: + # token = token.replace('', '').replace('\n', '/n') + print('{}{: >3} | {: <12}'.format(hit_mark, int(round(prob*100)), token), end=end_str) + + ret = None + for i in range(len(tokens)): + ind_ = input_ids[i].item() if input_ids is not None else tokenizer.vocab[tokens[i]] + prob_ = probs[i][ind_].item() + print_pair(tokens[i], prob_, end_str='\t') + values, indices = probs[i].topk(topk) + top_pairs = [] + for j in range(topk): + ind, prob = indices[j].item(), values[j].item() + hit_mark = '*' if ind == ind_ else ' ' + token = tokenizer.ids_to_tokens[ind] + print_pair(token, prob, hit_mark=hit_mark, end_str='' if j < topk - 1 else '\n') + top_pairs.append((token, prob)) + if tokens[i] == "[MASK]": + ret = top_pairs + return ret + +import colored +from colored import stylize + +def show_abnormals(tokens, probs, show_suggestions=False): + def gap2color(gap): + if gap <= 5: + return 'yellow_1' + elif gap <= 10: + return 'orange_1' + else: + return 'red_1' + + def print_token(token, suggestion, gap): + if gap == 0: + print(stylize(token + ' ', colored.fg('white') + colored.bg('black')), end='') + else: + print(stylize(token, colored.fg(gap2color(gap)) + colored.bg('black')), end='') + if show_suggestions and gap > 5: + print(stylize('/' + suggestion + ' ', colored.fg('green' if gap > 10 else 'cyan') + colored.bg('black')), end='') + else: + print(stylize(' ', colored.fg(gap2color(gap)) + colored.bg('black')), end='') + # print('/' + suggestion, end=' ') + # print('%.2f' % gap, end=' ') + + avg_gap = 0. 
+ for i in range(1, len(tokens) - 1): # skip first [CLS] and last [SEP] + ind_ = tokenizer.vocab[tokens[i]] + prob_ = probs[i][ind_].item() + top_prob = probs[i].max().item() + top_ind = probs[i].argmax().item() + gap = math.log(top_prob) - math.log(prob_) + suggestion = tokenizer.ids_to_tokens[top_ind] + print_token(tokens[i], suggestion, gap) + avg_gap += gap + avg_gap /= (len(tokens) - 2) + print() + print(avg_gap) + +analyzed_cache = {} + +def analyze_text(text, show_suggestions=False, show_firstk_probs=20): + if text[0] in analyzed_cache: + features, mlm_probs = analyzed_cache[text[0]] + given_mask = "[MASK]" in features[0].tokens + else: + examples = convert_text_to_examples(text) + features = convert_examples_to_features(examples, tokenizer, print_info=False) + given_mask = "[MASK]" in features[0].tokens + if not given_mask: + features = copy_and_mask_features(features) + + input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long) + input_type_ids = torch.tensor([f.input_type_ids for f in features], dtype=torch.long) + input_ids = input_ids.to(device) + input_type_ids = input_type_ids.to(device) + + mlm_logits, _ = model(input_ids, input_type_ids) + mlm_probs = F.softmax(mlm_logits, dim=-1) + + if not given_mask: + seq_len, _, vocab_size = mlm_probs.size() + reduced_mlm_probs = torch.Tensor(1, seq_len, vocab_size) + for i in range(seq_len): + reduced_mlm_probs[0, i] = mlm_probs[i, i] + mlm_probs = reduced_mlm_probs + + analyzed_cache[text[0]] = (features, mlm_probs) + + top_pairs = show_lm_probs(features[0].tokens, None, mlm_probs[0], firstk=show_firstk_probs) + if not given_mask: + show_abnormals(features[0].tokens, mlm_probs[0], show_suggestions=show_suggestions) + return top_pairs + + +def detect_vocabulary(): + import json + import os + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'WSC_child_problem.json') + with open(path, 'r') as f: + data_l = json.load(f) + f.close() + + print('Detect whether the vocabulary of WSC_child_problem.json in the tokenizer or not ...') + for data in data_l: + for s in data['sentences']: + for a in s['answer0'] + s['answer1']: + a = a.lower() + if a not in tokenizer.vocab: + print(a, 'not in vocab!!!') + print('Done.') + + +def test_by_WSC_child_problem(): + from collections import OrderedDict + import json + import os + import re + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'WSC_child_problem.json') + with open(path, 'r') as f: + data_l = json.load(f) + f.close() + + result = [] + s_order = ['sentence', 'answer1', 'answer0', 'correct_answer', 'adjacent_ref', 'predict_answer', 'score'] + data_order = ['index', 'sentences'] + for data in data_l: + if data['sentences'] != []: + for i in range(len(data['sentences'])): + s = data['sentences'][i] + s['predict_answer'] = [] + res = analyze_text([s['sentence']], show_firstk_probs=-1) + answer = s['answer1'] + s['answer0'] + print(data['index']) + print(res) + for r in res: + if any(a.lower() == r[0] for a in answer): + s['predict_answer'].append(list(r)) + s = OrderedDict(sorted(s.items(), key=lambda i:s_order.index(i[0]))) + data['sentences'][i] = s + print(s['predict_answer']) + data = OrderedDict(sorted(data.items(), key=lambda i:data_order.index(i[0]))) + result.append(data) + print('Save the predict_answer in WSC_child_problem.json') + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'WSC_child_problem.json') + with open(path, 'w') as f: + json.dump(result, f, indent=4, separators=(',', ': '), ensure_ascii=False) + f.close() + 
print('Done.') + + +test_by_WSC_child_problem() +#detect_vocabulary() diff --git a/tests/modeling_test.py b/tests/modeling_test.py index 48d56826f8e914..b5665121397d9b 100644 --- a/tests/modeling_test.py +++ b/tests/modeling_test.py @@ -22,7 +22,10 @@ import torch -from pytorch_pretrained_bert import BertConfig, BertModel +from pytorch_pretrained_bert import (BertConfig, BertModel, BertForMaskedLM, + BertForNextSentencePrediction, BertForPreTraining, + BertForQuestionAnswering, BertForSequenceClassification, + BertForTokenClassification) class BertModelTest(unittest.TestCase): @@ -35,6 +38,7 @@ def __init__(self, is_training=True, use_input_mask=True, use_token_type_ids=True, + use_labels=True, vocab_size=99, hidden_size=32, num_hidden_layers=5, @@ -45,7 +49,9 @@ def __init__(self, attention_probs_dropout_prob=0.1, max_position_embeddings=512, type_vocab_size=16, + type_sequence_label_size=2, initializer_range=0.02, + num_labels=3, scope=None): self.parent = parent self.batch_size = batch_size @@ -53,6 +59,7 @@ def __init__(self, self.is_training = is_training self.use_input_mask = use_input_mask self.use_token_type_ids = use_token_type_ids + self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers @@ -63,10 +70,12 @@ def __init__(self, self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.type_vocab_size = type_vocab_size + self.type_sequence_label_size = type_sequence_label_size self.initializer_range = initializer_range + self.num_labels = num_labels self.scope = scope - def create_model(self): + def prepare_config_and_inputs(self): input_ids = BertModelTest.ids_tensor([self.batch_size, self.seq_length], self.vocab_size) input_mask = None @@ -77,6 +86,12 @@ def create_model(self): if self.use_token_type_ids: token_type_ids = BertModelTest.ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size) + sequence_labels = None + token_labels = None + if self.use_labels: + sequence_labels = BertModelTest.ids_tensor([self.batch_size], self.type_sequence_label_size) + token_labels = BertModelTest.ids_tensor([self.batch_size, self.seq_length], self.num_labels) + config = BertConfig( vocab_size_or_config_json_file=self.vocab_size, hidden_size=self.hidden_size, @@ -90,10 +105,16 @@ def create_model(self): type_vocab_size=self.type_vocab_size, initializer_range=self.initializer_range) - model = BertModel(config=config) + return config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels - all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask) + def check_loss_output(self, result): + self.parent.assertListEqual( + list(result["loss"].size()), + []) + def create_bert_model(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertModel(config=config) + all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask) outputs = { "sequence_output": all_encoder_layers[-1], "pooled_output": pooled_output, @@ -101,13 +122,119 @@ def create_model(self): } return outputs - def check_output(self, result): + def check_bert_model_output(self, result): + self.parent.assertListEqual( + [size for layer in result["all_encoder_layers"] for size in layer.size()], + [self.batch_size, self.seq_length, self.hidden_size] * self.num_hidden_layers) self.parent.assertListEqual( list(result["sequence_output"].size()), [self.batch_size, self.seq_length, self.hidden_size]) - 
self.parent.assertListEqual(list(result["pooled_output"].size()), [self.batch_size, self.hidden_size]) + + def create_bert_for_masked_lm(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertForMaskedLM(config=config) + loss = model(input_ids, token_type_ids, input_mask, token_labels) + prediction_scores = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "prediction_scores": prediction_scores, + } + return outputs + + def check_bert_for_masked_lm_output(self, result): + self.parent.assertListEqual( + list(result["prediction_scores"].size()), + [self.batch_size, self.seq_length, self.vocab_size]) + + def create_bert_for_next_sequence_prediction(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertForNextSentencePrediction(config=config) + loss = model(input_ids, token_type_ids, input_mask, sequence_labels) + seq_relationship_score = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "seq_relationship_score": seq_relationship_score, + } + return outputs + + def check_bert_for_next_sequence_prediction_output(self, result): + self.parent.assertListEqual( + list(result["seq_relationship_score"].size()), + [self.batch_size, 2]) + + + def create_bert_for_pretraining(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertForPreTraining(config=config) + loss = model(input_ids, token_type_ids, input_mask, token_labels, sequence_labels) + prediction_scores, seq_relationship_score = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "prediction_scores": prediction_scores, + "seq_relationship_score": seq_relationship_score, + } + return outputs + + def check_bert_for_pretraining_output(self, result): + self.parent.assertListEqual( + list(result["prediction_scores"].size()), + [self.batch_size, self.seq_length, self.vocab_size]) + self.parent.assertListEqual( + list(result["seq_relationship_score"].size()), + [self.batch_size, 2]) + + + def create_bert_for_question_answering(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertForQuestionAnswering(config=config) + loss = model(input_ids, token_type_ids, input_mask, sequence_labels, sequence_labels) + start_logits, end_logits = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "start_logits": start_logits, + "end_logits": end_logits, + } + return outputs + + def check_bert_for_question_answering_output(self, result): + self.parent.assertListEqual( + list(result["start_logits"].size()), + [self.batch_size, self.seq_length]) + self.parent.assertListEqual( + list(result["end_logits"].size()), + [self.batch_size, self.seq_length]) + + + def create_bert_for_sequence_classification(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = BertForSequenceClassification(config=config, num_labels=self.num_labels) + loss = model(input_ids, token_type_ids, input_mask, sequence_labels) + logits = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "logits": logits, + } + return outputs + + def check_bert_for_sequence_classification_output(self, result): + self.parent.assertListEqual( + list(result["logits"].size()), + [self.batch_size, self.num_labels]) + + + def create_bert_for_token_classification(self, config, input_ids, token_type_ids, input_mask, sequence_labels, token_labels): + model = 
BertForTokenClassification(config=config, num_labels=self.num_labels) + loss = model(input_ids, token_type_ids, input_mask, token_labels) + logits = model(input_ids, token_type_ids, input_mask) + outputs = { + "loss": loss, + "logits": logits, + } + return outputs + + def check_bert_for_token_classification_output(self, result): + self.parent.assertListEqual( + list(result["logits"].size()), + [self.batch_size, self.seq_length, self.num_labels]) + + def test_default(self): self.run_tester(BertModelTest.BertModelTester(self)) @@ -118,8 +245,33 @@ def test_config_to_json_string(self): self.assertEqual(obj["hidden_size"], 37) def run_tester(self, tester): - output_result = tester.create_model() - tester.check_output(output_result) + config_and_inputs = tester.prepare_config_and_inputs() + output_result = tester.create_bert_model(*config_and_inputs) + tester.check_bert_model_output(output_result) + + output_result = tester.create_bert_for_masked_lm(*config_and_inputs) + tester.check_bert_for_masked_lm_output(output_result) + tester.check_loss_output(output_result) + + output_result = tester.create_bert_for_next_sequence_prediction(*config_and_inputs) + tester.check_bert_for_next_sequence_prediction_output(output_result) + tester.check_loss_output(output_result) + + output_result = tester.create_bert_for_pretraining(*config_and_inputs) + tester.check_bert_for_pretraining_output(output_result) + tester.check_loss_output(output_result) + + output_result = tester.create_bert_for_question_answering(*config_and_inputs) + tester.check_bert_for_question_answering_output(output_result) + tester.check_loss_output(output_result) + + output_result = tester.create_bert_for_sequence_classification(*config_and_inputs) + tester.check_bert_for_sequence_classification_output(output_result) + tester.check_loss_output(output_result) + + output_result = tester.create_bert_for_token_classification(*config_and_inputs) + tester.check_bert_for_token_classification_output(output_result) + tester.check_loss_output(output_result) @classmethod def ids_tensor(cls, shape, vocab_size, rng=None, name=None): diff --git a/tests/optimization_test.py b/tests/optimization_test.py index 1c010750ae1f0f..848b9d1cf5c2f1 100644 --- a/tests/optimization_test.py +++ b/tests/optimization_test.py @@ -32,10 +32,10 @@ def assertListAlmostEqual(self, list1, list2, tol): def test_adam(self): w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True) target = torch.tensor([0.4, 0.2, -0.5]) - criterion = torch.nn.MSELoss(reduction='elementwise_mean') + criterion = torch.nn.MSELoss() # No warmup, constant schedule, no gradient clipping optimizer = BertAdam(params=[w], lr=2e-1, - weight_decay_rate=0.0, + weight_decay=0.0, max_grad_norm=-1) for _ in range(100): loss = criterion(w, target) diff --git a/tests/tokenization_test.py b/tests/tokenization_test.py index f541a620e8320b..e1474e938bbcb9 100644 --- a/tests/tokenization_test.py +++ b/tests/tokenization_test.py @@ -44,12 +44,30 @@ def test_full_tokenizer(self): self.assertListEqual( tokenizer.convert_tokens_to_ids(tokens), [7, 4, 5, 10, 8, 9]) + def test_full_tokenizer_raises_error_for_long_sequences(self): + vocab_tokens = [ + "[UNK]", "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", + "##ing", "," + ] + with open("/tmp/bert_tokenizer_test.txt", "w") as vocab_writer: + vocab_writer.write("".join([x + "\n" for x in vocab_tokens])) + vocab_file = vocab_writer.name + + tokenizer = BertTokenizer(vocab_file, max_len=10) + os.remove(vocab_file) + tokens = tokenizer.tokenize(u"the cat sat 
on the mat in the summer time") + indices = tokenizer.convert_tokens_to_ids(tokens) + self.assertListEqual(indices, [0 for _ in range(10)]) + + tokens = tokenizer.tokenize(u"the cat sat on the mat in the summer time .") + self.assertRaises(ValueError, tokenizer.convert_tokens_to_ids, tokens) + def test_chinese(self): tokenizer = BasicTokenizer() - + self.assertListEqual( tokenizer.tokenize(u"ah\u535A\u63A8zz"), - [u"ah", u"\u535A", u"\u63A8", u"zz"]) + [u"ah", u"\u535A", u"\u63A8", u"zz"]) def test_basic_tokenizer_lower(self): tokenizer = BasicTokenizer(do_lower_case=True) diff --git a/train_child.py b/train_child.py new file mode 100644 index 00000000000000..a9d104e1753e55 --- /dev/null +++ b/train_child.py @@ -0,0 +1,177 @@ +import argparse +import os +import json +import itertools +from itertools import product, permutations +from random import sample + +import torch +from torch.utils.data import DataLoader, RandomSampler, SequentialSampler + +from pytorch_pretrained_bert.tokenization import BertTokenizer +from pytorch_pretrained_bert.modeling import BertForPreTraining, BertForMaskedLM, BertConfig +from pytorch_pretrained_bert.optimization import BertAdam +from run_child_finetuning import * +#from child_frames import frames +#from child_wsc_generator import make_sentences +from child_generator import make_sentences + +BERT_DIR = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased' +tokenizer = BertTokenizer.from_pretrained('/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt') + + +parser = argparse.ArgumentParser() + +parser.add_argument("--max_seq_length", + default=128, + type=int, + help="The maximum total input sequence length after WordPiece tokenization. \n" + "Sequences longer than this will be truncated, and sequences shorter \n" + "than this will be padded.") +parser.add_argument("--do_train", + action='store_true', + help="Whether to run training.") +parser.add_argument("--do_eval", + action='store_true', + help="Whether to run eval on the dev set.") +parser.add_argument("--train_batch_size", + default=32, + type=int, + help="Total batch size for training.") +parser.add_argument("--eval_batch_size", + default=32, + type=int, + help="Total batch size for eval.") +parser.add_argument("--learning_rate", + default=3e-5, + type=float, + help="The initial learning rate for Adam.") +parser.add_argument("--num_train_epochs", + default=3.0, + type=float, + help="Total number of training epochs to perform.") +parser.add_argument("--warmup_proportion", + default=0.1, + type=float, + help="Proportion of training to perform linear learning rate warmup for. " + "E.g., 0.1 = 10%% of training.") +parser.add_argument("--no_cuda", + action='store_true', + help="Whether not to use CUDA when available") +parser.add_argument("--do_lower_case", + action='store_true', + help="Whether to lower case the input text. 
True for uncased models, False for cased models.") +parser.add_argument('--seed', + type=int, + default=42, + help="random seed for initialization") +parser.add_argument('--gradient_accumulation_steps', + type=int, + default=1, + help="Number of updates steps to accumualte before performing a backward/update pass.") +parser.add_argument("--dev_percent", + default=0.5, + type=float) +# args = parser.parse_args(['--output_dir', '/home']) +# args = parser.parse_args([]) +args = parser.parse_args() +args.do_lower_case = True +args.do_train = True +args.do_eval = True +args.eval_batch_size = 128 +# args.learning_rate = 1e-4 +#args.num_train_epochs = 100 +print(args) + +sentences = make_sentences(maybe=False, structured=False) +#sentences = [] +#for frame in frames: +# sentences += make_sentences(**frame)[-1] +logger.info('num_sent = %d' % len(sentences)) +child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent) +train_features = child_dataset.get_train_features() +logger.info('num_train_examples = %d' % len(train_features)) +num_train_steps = int( + len(train_features) / args.train_batch_size / args.gradient_accumulation_steps * args.num_train_epochs) +logger.info('num_train_steps = %d' % num_train_steps) +eval_features = child_dataset.get_dev_features() + +train_dataset = child_dataset.build_dataset(train_features) +eval_dataset = child_dataset.build_dataset(eval_features) + +device = torch.device("cuda" if torch.cuda.is_available() and not args.no_cuda else "cpu") +n_gpu = torch.cuda.device_count() +logger.info("device: {} n_gpu: {}".format( + device, n_gpu)) + +args.train_batch_size = int(args.train_batch_size / args.gradient_accumulation_steps) + +random.seed(args.seed) +np.random.seed(args.seed) +torch.manual_seed(args.seed) +if n_gpu > 0: + torch.cuda.manual_seed_all(args.seed) + +# Prepare model +model = BertForMaskedLM.from_pretrained(BERT_DIR) +#CONFIG_NAME = 'bert_config_small.json' +#config = BertConfig(os.path.join(BERT_DIR, CONFIG_NAME)) +#model = BertForMaskedLM(config) +_ = model.to(device) +if n_gpu > 1: + model = torch.nn.DataParallel(model) + +# Prepare optimizer +param_optimizer = list(model.named_parameters()) +no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] +optimizer_grouped_parameters = [ + {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, + {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} + ] +optimizer = BertAdam(optimizer_grouped_parameters, + lr=args.learning_rate, + warmup=args.warmup_proportion, + t_total=num_train_steps) + +logger.info("Epoch 0") +logger.info("Evaluating on train set...") +#validate(model, train_dataset, device) +logger.info("Evaluating on valid set...") +#validate(model, eval_dataset, device) + +global_step = 0 +for epoch in trange(int(args.num_train_epochs), desc="Epoch"): + _ = model.train() + tr_loss = 0 + nb_tr_examples, nb_tr_steps = 0, 0 +# for step, batch in enumerate(tqdm(train_dataloader, desc="Iteration")): + for step, batch_idx in enumerate(get_batch_index(len(train_dataset), args.train_batch_size, randomized=True)): + batch = tuple(t[batch_idx] for t in train_dataset.tensors) + batch = tuple(t.to(device) for t in batch) + input_ids, input_mask, segment_ids, lm_label_ids, is_next = batch + loss = model(input_ids, segment_ids, input_mask, lm_label_ids) + if n_gpu > 1: + loss = loss.mean() # mean() to average on multi-gpu. 
+ if args.gradient_accumulation_steps > 1: + loss = loss / args.gradient_accumulation_steps + loss.backward() + tr_loss += loss.item() + nb_tr_examples += input_ids.size(0) + nb_tr_steps += 1 + if (step + 1) % args.gradient_accumulation_steps == 0: + # modify learning rate with special warm up BERT uses + lr_this_step = args.learning_rate * warmup_linear(global_step/num_train_steps, args.warmup_proportion) + if global_step % 1000 == 0: + print('global_step %d, lr = %f' % (global_step, lr_this_step)) + for param_group in optimizer.param_groups: + param_group['lr'] = lr_this_step + optimizer.step() + optimizer.zero_grad() + global_step += 1 + + if args.do_eval: + logger.info("Epoch %d" % (epoch + 1)) + logger.info("Evaluating on train set...") + validate(model, train_dataset, device) + logger.info("Evaluating on valid set...") + validate(model, eval_dataset, device) diff --git a/train_child_wh+yesno_maybe0_structured0_devpercent.1_noiselen30_bert.out b/train_child_wh+yesno_maybe0_structured0_devpercent.1_noiselen30_bert.out new file mode 100644 index 00000000000000..d883d97aef83c6 --- /dev/null +++ b/train_child_wh+yesno_maybe0_structured0_devpercent.1_noiselen30_bert.out @@ -0,0 +1,220 @@ +06/09/2019 19:28:36 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:28:36 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:28:36 - INFO - run_child_finetuning - num_sent = 46080 +06/09/2019 19:29:02 - INFO - run_child_finetuning - num_train_steps = 7776 +06/09/2019 19:29:03 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 19:29:03 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 19:29:03 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 19:29:06 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 19:29:08 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 19:29:08 - INFO - run_child_finetuning - Evaluating on train set... 
+06/09/2019 19:30:06 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:30:06 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:30:06 - INFO - run_child_finetuning - num_sent = 46080 +06/09/2019 19:30:32 - INFO - run_child_finetuning - num_train_steps = 7776 +06/09/2019 19:30:37 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 19:30:37 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 19:30:37 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 19:30:40 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 19:30:45 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 19:30:45 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 19:30:45 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00 + validate(model, train_dataset, device) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 486, in validate + tmp_eval_loss = model(input_ids, segment_ids, input_mask, label_ids) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 761, in forward + prediction_scores = self.cls(sequence_output) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 393, in forward + prediction_scores = self.predictions(sequence_output) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 383, in forward + hidden_states = self.decoder(hidden_states) + self.bias +RuntimeError: CUDA error: out of memory + +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. 
+Namespace(dev_percent=0.1, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 46080 -> 46080 +global_step 0, lr = 0.000000 +global_step 1000, lr = 0.000026 +global_step 2000, lr = 0.000022 +06/09/2019 20:21:50 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 20:21:50 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 20:21:50 - INFO - run_child_finetuning - num_sent = 46080 +06/09/2019 20:22:16 - INFO - run_child_finetuning - num_train_steps = 7776 +06/09/2019 20:22:21 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 20:22:21 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 20:22:21 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 20:22:24 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 20:22:26 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 20:22:26 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 20:22:26 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00 + validate(model, train_dataset, device) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 486, in validate + tmp_eval_loss = model(input_ids, segment_ids, input_mask, label_ids) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 761, in forward + prediction_scores = self.cls(sequence_output) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 393, in forward + prediction_scores = self.predictions(sequence_output) + File "/home/qsj/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py", line 477, in __call__ + result = self.forward(*input, **kwargs) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/modeling.py", line 383, in forward + hidden_states = self.decoder(hidden_states) + self.bias +RuntimeError: CUDA error: out of memory + +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. 
+Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. +Namespace(dev_percent=0.1, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 46080 -> 46080 +global_step 0, lr = 0.000000 +global_step 1000, lr = 0.000026 +global_step 2000, lr = 0.000022 +06/09/2019 21:10:16 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 21:10:16 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 21:10:16 - INFO - run_child_finetuning - num_sent = 46080 +06/09/2019 21:10:38 - INFO - run_child_finetuning - num_train_steps = 7776 +06/09/2019 21:10:43 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 21:10:43 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 21:10:43 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 21:10:46 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 21:10:48 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 21:10:48 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 21:10:48 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00 + child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 68, in __init__ + t1, t2, is_next_label = self.split_sent(line) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 121, in split_sent + assert self.one_sent +AssertionError +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. 
+Namespace(dev_percent=0.05, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 92160 -> 92160 +06/09/2019 19:12:36 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:12:36 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:12:36 - INFO - run_child_finetuning - num_sent = 92160 +Traceback (most recent call last): + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details + __import__(pkg_name) + File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 86, in + child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 71, in __init__ + tokens_b = self.tokenizer.tokenize(t2) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/tokenization.py", line 94, in tokenize + for token in self.basic_tokenizer.tokenize(text): + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/tokenization.py", line 174, in tokenize + text = self._clean_text(text) + File "/home/xd/projects/pytorch-pretrained-BERT/pytorch_pretrained_bert/tokenization.py", line 264, in _clean_text + for char in text: +TypeError: 'NoneType' object is not iterable +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. 
+Namespace(dev_percent=0.05, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 92160 -> 92159 +06/09/2019 19:21:08 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:21:08 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:21:09 - INFO - run_child_finetuning - num_sent = 92160 +06/09/2019 19:22:02 - INFO - run_child_finetuning - num_train_steps = 16416 +06/09/2019 19:22:03 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 19:22:03 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 19:22:03 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 19:22:07 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 19:22:09 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 19:22:09 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 19:25:45 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:25:45 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 19:25:46 - INFO - run_child_finetuning - num_sent = 46080 +06/09/2019 19:26:12 - INFO - run_child_finetuning - num_train_steps = 8208 +06/09/2019 19:26:13 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 19:26:13 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 19:26:13 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 19:26:16 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 19:26:18 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 19:26:18 - INFO - run_child_finetuning - Evaluating on train set... 
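The num_train_steps values reported in these runs follow from the logged arguments: the dev split is carved off first and the remaining sentences are iterated for num_train_epochs in batches of train_batch_size (46080 sentences with dev_percent=0.1 leave 41472 training sentences, i.e. 1296 batches and 7776 steps over 6 epochs; 92160 with dev_percent=0.05 give 16416 steps). A small check of that arithmetic, assuming exactly this relation; expected_train_steps is a hypothetical helper written only for this sketch:

    # Assumed relation between the logged arguments and num_train_steps.
    def expected_train_steps(num_sent, dev_percent, batch_size, epochs):
        num_train_sent = int(round(num_sent * (1.0 - dev_percent)))  # dev split removed first
        return (num_train_sent // batch_size) * int(epochs)          # full batches per epoch

    print(expected_train_steps(46080, 0.10, 32, 6))  # 7776, as logged
    print(expected_train_steps(92160, 0.05, 32, 6))  # 16416, as logged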
diff --git a/train_child_wh+yesno_maybe1_structured0_devpercent.1_bert.out b/train_child_wh+yesno_maybe1_structured0_devpercent.1_bert.out new file mode 100644 index 00000000000000..ce94b8158c2bf4 --- /dev/null +++ b/train_child_wh+yesno_maybe1_structured0_devpercent.1_bert.out @@ -0,0 +1,90 @@ +06/09/2019 18:18:33 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 18:18:33 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 18:19:03 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 18:19:03 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 18:19:03 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 18:19:06 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 18:19:08 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 18:19:08 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 18:19:08 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00 3840 +num_train_steps = 6000 +global_step 0, lr = 0.000000 +global_step 1000, lr = 0.000417 +global_step 2000, lr = 0.000333 +global_step 3000, lr = 0.000250 +global_step 4000, lr = 0.000167 +global_step 5000, lr = 0.000083 diff --git a/train_child_whonly_maybe0_bert.out b/train_child_whonly_maybe0_bert.out new file mode 100644 index 00000000000000..23245ed9774704 --- /dev/null +++ b/train_child_whonly_maybe0_bert.out @@ -0,0 +1,60 @@ +06/09/2019 16:43:11 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 16:43:11 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 16:43:13 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 16:43:13 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 16:43:13 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 16:43:16 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 16:43:18 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 16:43:18 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 16:43:18 - INFO - run_child_finetuning - Evaluating on valid set... 
+ Epoch: 0%| | 0/3 [00:00 3840 +num_train_steps = 180 +global_step 0, lr = 0.000000 diff --git a/train_child_whonly_maybe1.out b/train_child_whonly_maybe1.out new file mode 100644 index 00000000000000..1ca64917d28b87 --- /dev/null +++ b/train_child_whonly_maybe1.out @@ -0,0 +1,927 @@ +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. +06/09/2019 15:39:53 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 15:39:53 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +Namespace(dev_percent=0.5, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=0.0005, max_seq_length=128, no_cuda=False, num_train_epochs=100, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 7680 -> 5760 +num_train_steps = 12000 +06/09/2019 15:39:59 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 15:40:05 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 15:40:05 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 15:40:05 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/100 [00:00 5760 +num_train_steps = 12000 +06/09/2019 16:27:29 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 16:27:34 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 16:27:34 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 16:27:34 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/100 [00:00 5760 +num_train_steps = 360 +global_step 0, lr = 0.000000 diff --git a/train_child_whonly_maybeonly.out b/train_child_whonly_maybeonly.out new file mode 100644 index 00000000000000..87a640ee24b5c9 --- /dev/null +++ b/train_child_whonly_maybeonly.out @@ -0,0 +1,506 @@ +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. 
+06/09/2019 16:16:25 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 16:16:25 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +Namespace(dev_percent=0.5, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=0.0005, max_seq_length=128, no_cuda=False, num_train_epochs=100, seed=42, train_batch_size=32, warmup_proportion=0.1) +num_sent = 3840 -> 1920 +num_train_steps = 6000 +06/09/2019 16:16:33 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 16:16:36 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 16:16:36 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 16:16:36 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/100 [00:00 + self.all_lines = [] +NameError: name 'self' is not defined +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. +Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +06/09/2019 22:16:32 - ERROR - pytorch_pretrained_bert.tokenization - Model name '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt' was not found in model name list (bert-base-uncased, bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, bert-base-multilingual-cased, bert-base-chinese). We assumed '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt' was a path or url but couldn't find any file associated to this path or url. +06/09/2019 22:16:32 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +Traceback (most recent call last): + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details + __import__(pkg_name) + File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 88, in + sentences += make_sentences(**frame) + File "/home/xd/projects/pytorch-pretrained-BERT/child_generator.py", line 53, in make_sentences + assert entities[0].lower() in tokenizer.vocab , entities[0] +AttributeError: 'NoneType' object has no attribute 'vocab' +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. 
+Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +06/09/2019 22:17:49 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:17:49 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:17:49 - INFO - run_child_finetuning - num_sent = 120 +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. +Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +[[("John couldn't see the stage behind Susan ||| because [John] is short.", "John couldn't catch sight of the stage behind Susan ||| because [John] is short."), ("John couldn't see the stage behind Susan ||| because [Susan] is tall.", "John couldn't catch sight of the stage behind Susan ||| because [Susan] is tall."), ("John couldn't see the stage behind Susan ||| because [John] isn't tall.", "John couldn't catch sight of the stage behind Susan ||| because [John] isn't tall."), ("John couldn't see the stage behind Susan ||| because [Susan] isn't short.", "John couldn't catch sight of the stage behind Susan ||| because [Susan] isn't short."), ('Susan blocked the view of John ||| because [John] is short.', 'Susan obstructed the sight of John ||| because [John] is short.'), ('Susan blocked the view of John ||| because [Susan] is tall.', 'Susan obstructed the sight of John ||| because [Susan] is tall.'), ("Susan blocked the view of John ||| because [John] isn't tall.", "Susan obstructed the sight of John ||| because [John] isn't tall."), ("Susan blocked the view of John ||| because [Susan] isn't short.", "Susan obstructed the sight of John ||| because [Susan] isn't short."), ("John could see the stage behind Susan ||| because [John] isn't short.", "John could catch sight of the stage behind Susan ||| because [John] isn't short."), ("John could see the stage behind Susan ||| because [Susan] isn't tall.", "John could catch sight of the stage behind Susan ||| because [Susan] isn't tall."), ('John could see the stage behind Susan ||| because [John] is tall.', 'John could catch sight of the stage behind Susan ||| because [John] is tall.'), ('John could see the stage behind Susan ||| because [Susan] is short.', 'John could catch sight of the stage behind Susan ||| because [Susan] is short.'), ("Susan didn't block the view of John ||| because [John] isn't short.", "Susan didn't obstruct the sight of John ||| because [John] isn't short."), ("Susan didn't block the view of John ||| because [Susan] isn't tall.", "Susan didn't obstruct the sight of John ||| because [Susan] isn't tall."), ("Susan didn't block the view of John ||| because [John] is 
tall.", "Susan didn't obstruct the sight of John ||| because [John] is tall."), ("Susan didn't block the view of John ||| because [Susan] is short.", "Susan didn't obstruct the sight of John ||| because [Susan] is short."), ("John couldn't see the stage behind Susan ||| although [John] isn't short.", "John couldn't catch sight of the stage behind Susan ||| although [John] isn't short."), ("John couldn't see the stage behind Susan ||| although [Susan] isn't tall.", "John couldn't catch sight of the stage behind Susan ||| although [Susan] isn't tall."), ("John couldn't see the stage behind Susan ||| although [John] is tall.", "John couldn't catch sight of the stage behind Susan ||| although [John] is tall."), ("John couldn't see the stage behind Susan ||| although [Susan] is short.", "John couldn't catch sight of the stage behind Susan ||| although [Susan] is short."), ("Susan blocked the view of John ||| although [John] isn't short.", "Susan obstructed the sight of John ||| although [John] isn't short."), ("Susan blocked the view of John ||| although [Susan] isn't tall.", "Susan obstructed the sight of John ||| although [Susan] isn't tall."), ('Susan blocked the view of John ||| although [John] is tall.', 'Susan obstructed the sight of John ||| although [John] is tall.'), ('Susan blocked the view of John ||| although [Susan] is short.', 'Susan obstructed the sight of John ||| although [Susan] is short.'), ('John could see the stage behind Susan ||| although [John] is short.', 'John could catch sight of the stage behind Susan ||| although [John] is short.'), ('John could see the stage behind Susan ||| although [Susan] is tall.', 'John could catch sight of the stage behind Susan ||| although [Susan] is tall.'), ("John could see the stage behind Susan ||| although [John] isn't tall.", "John could catch sight of the stage behind Susan ||| although [John] isn't tall."), ("John could see the stage behind Susan ||| although [Susan] isn't short.", "John could catch sight of the stage behind Susan ||| although [Susan] isn't short."), ("Susan didn't block the view of John ||| although [John] is short.", "Susan didn't obstruct the sight of John ||| although [John] is short."), ("Susan didn't block the view of John ||| although [Susan] is tall.", "Susan didn't obstruct the sight of John ||| although [Susan] is tall."), ("Susan didn't block the view of John ||| although [John] isn't tall.", "Susan didn't obstruct the sight of John ||| although [John] isn't tall."), ("Susan didn't block the view of John ||| although [Susan] isn't short.", "Susan didn't obstruct the sight of John ||| although [Susan] isn't short.")], [('the newspapers could be placed on all the chairs ||| because there were many of the [newspapers].', 'the newspapers could be put on all the chairs ||| because there were many of the [newspapers].'), ('the newspapers could be placed on all the chairs ||| because there were few of the [chairs].', 'the newspapers could be put on all the chairs ||| because there were few of the [chairs].'), ('the newspapers could be placed on all the chairs ||| because there were not few of the [newspapers].', 'the newspapers could be put on all the chairs ||| because there were not few of the [newspapers].'), ('the newspapers could be placed on all the chairs ||| because there were not many of the [chairs].', 'the newspapers could be put on all the chairs ||| because there were not many of the [chairs].'), ('the chairs could all be covered by the newspapers ||| because there were many of the [newspapers].', 
'the chairs could carry all the newspapers ||| because there were many of the [newspapers].'), ('the chairs could all be covered by the newspapers ||| because there were few of the [chairs].', 'the chairs could carry all the newspapers ||| because there were few of the [chairs].'), ('the chairs could all be covered by the newspapers ||| because there were not few of the [newspapers].', 'the chairs could carry all the newspapers ||| because there were not few of the [newspapers].'), ('the chairs could all be covered by the newspapers ||| because there were not many of the [chairs].', 'the chairs could carry all the newspapers ||| because there were not many of the [chairs].'), ("the newspapers couldn't be placed on all the chairs ||| because there were not many of the [newspapers].", "the newspapers couldn't be put on all the chairs ||| because there were not many of the [newspapers]."), ("the newspapers couldn't be placed on all the chairs ||| because there were not few of the [chairs].", "the newspapers couldn't be put on all the chairs ||| because there were not few of the [chairs]."), ("the newspapers couldn't be placed on all the chairs ||| because there were few of the [newspapers].", "the newspapers couldn't be put on all the chairs ||| because there were few of the [newspapers]."), ("the newspapers couldn't be placed on all the chairs ||| because there were many of the [chairs].", "the newspapers couldn't be put on all the chairs ||| because there were many of the [chairs]."), ("the chairs couldn't all be covered by the newspapers ||| because there were not many of the [newspapers].", "the chairs couldn't carry all the newspapers ||| because there were not many of the [newspapers]."), ("the chairs couldn't all be covered by the newspapers ||| because there were not few of the [chairs].", "the chairs couldn't carry all the newspapers ||| because there were not few of the [chairs]."), ("the chairs couldn't all be covered by the newspapers ||| because there were few of the [newspapers].", "the chairs couldn't carry all the newspapers ||| because there were few of the [newspapers]."), ("the chairs couldn't all be covered by the newspapers ||| because there were many of the [chairs].", "the chairs couldn't carry all the newspapers ||| because there were many of the [chairs]."), ('the newspapers could be placed on all the chairs ||| although there were not many of the [newspapers].', 'the newspapers could be put on all the chairs ||| although there were not many of the [newspapers].'), ('the newspapers could be placed on all the chairs ||| although there were not few of the [chairs].', 'the newspapers could be put on all the chairs ||| although there were not few of the [chairs].'), ('the newspapers could be placed on all the chairs ||| although there were few of the [newspapers].', 'the newspapers could be put on all the chairs ||| although there were few of the [newspapers].'), ('the newspapers could be placed on all the chairs ||| although there were many of the [chairs].', 'the newspapers could be put on all the chairs ||| although there were many of the [chairs].'), ('the chairs could all be covered by the newspapers ||| although there were not many of the [newspapers].', 'the chairs could carry all the newspapers ||| although there were not many of the [newspapers].'), ('the chairs could all be covered by the newspapers ||| although there were not few of the [chairs].', 'the chairs could carry all the newspapers ||| although there were not few of the [chairs].'), ('the chairs could 
all be covered by the newspapers ||| although there were few of the [newspapers].', 'the chairs could carry all the newspapers ||| although there were few of the [newspapers].'), ('the chairs could all be covered by the newspapers ||| although there were many of the [chairs].', 'the chairs could carry all the newspapers ||| although there were many of the [chairs].'), ("the newspapers couldn't be placed on all the chairs ||| although there were many of the [newspapers].", "the newspapers couldn't be put on all the chairs ||| although there were many of the [newspapers]."), ("the newspapers couldn't be placed on all the chairs ||| although there were few of the [chairs].", "the newspapers couldn't be put on all the chairs ||| although there were few of the [chairs]."), ("the newspapers couldn't be placed on all the chairs ||| although there were not few of the [newspapers].", "the newspapers couldn't be put on all the chairs ||| although there were not few of the [newspapers]."), ("the newspapers couldn't be placed on all the chairs ||| although there were not many of the [chairs].", "the newspapers couldn't be put on all the chairs ||| although there were not many of the [chairs]."), ("the chairs couldn't all be covered by the newspapers ||| although there were many of the [newspapers].", "the chairs couldn't carry all the newspapers ||| although there were many of the [newspapers]."), ("the chairs couldn't all be covered by the newspapers ||| although there were few of the [chairs].", "the chairs couldn't carry all the newspapers ||| although there were few of the [chairs]."), ("the chairs couldn't all be covered by the newspapers ||| although there were not few of the [newspapers].", "the chairs couldn't carry all the newspapers ||| although there were not few of the [newspapers]."), ("the chairs couldn't all be covered by the newspapers ||| although there were not many of the [chairs].", "the chairs couldn't carry all the newspapers ||| although there were not many of the [chairs].")], ["Anna did better than Andy on the test ||| although [Anna] hadn't studied hard.", "Anna did better than Andy on the test ||| although [Andy] wasn't lazy in doing homework.", 'Anna did better than Andy on the test ||| although [Anna] was lazy in doing homework.', 'Anna did better than Andy on the test ||| although [Andy] had studied hard.', "Andy did worse than Anna on the test ||| although [Anna] hadn't studied hard.", "Andy did worse than Anna on the test ||| although [Andy] wasn't lazy in doing homework.", 'Andy did worse than Anna on the test ||| although [Anna] was lazy in doing homework.', 'Andy did worse than Anna on the test ||| although [Andy] had studied hard.', "Anna didn't do better than Andy on the test ||| although [Anna] had studied hard.", "Anna didn't do better than Andy on the test ||| although [Andy] was lazy in doing homework.", "Anna didn't do better than Andy on the test ||| although [Anna] wasn't lazy in doing homework.", "Anna didn't do better than Andy on the test ||| although [Andy] hadn't studied hard.", "Andy didn't do worse than Anna on the test ||| although [Anna] had studied hard.", "Andy didn't do worse than Anna on the test ||| although [Andy] was lazy in doing homework.", "Andy didn't do worse than Anna on the test ||| although [Anna] wasn't lazy in doing homework.", "Andy didn't do worse than Anna on the test ||| although [Andy] hadn't studied hard."], ['Bill passed the half-empty plate to Amy ||| because [Bill] was full.', 'Bill passed the half-empty plate to Amy ||| 
because [Amy] was hungry.', "Bill passed the half-empty plate to Amy ||| because [Bill] wasn't hungry.", "Bill passed the half-empty plate to Amy ||| because [Amy] wasn't full.", 'Amy received the half-empty plate from Bill ||| because [Bill] was full.', 'Amy received the half-empty plate from Bill ||| because [Amy] was hungry.', "Amy received the half-empty plate from Bill ||| because [Bill] wasn't hungry.", "Amy received the half-empty plate from Bill ||| because [Amy] wasn't full.", "Bill didn't pass the half-empty plate to Amy ||| because [Bill] wasn't full.", "Bill didn't pass the half-empty plate to Amy ||| because [Amy] wasn't hungry.", "Bill didn't pass the half-empty plate to Amy ||| because [Bill] was hungry.", "Bill didn't pass the half-empty plate to Amy ||| because [Amy] was full.", "Amy didn't received the half-empty plate from Bill ||| because [Bill] wasn't full.", "Amy didn't received the half-empty plate from Bill ||| because [Amy] wasn't hungry.", "Amy didn't received the half-empty plate from Bill ||| because [Bill] was hungry.", "Amy didn't received the half-empty plate from Bill ||| because [Amy] was full."], ['Running at about the same speed, Tom beat Sue in the running race ||| because [Tom] had a good start.', 'Running at about the same speed, Tom beat Sue in the running race ||| because [Sue] had a bad start.', "Running at about the same speed, Tom beat Sue in the running race ||| because [Tom] didn't have a bad start.", "Running at about the same speed, Tom beat Sue in the running race ||| because [Sue] didn't have a good start.", 'Running at about the same speed, Sue lost to Tom in the running race ||| because [Tom] had a good start.', 'Running at about the same speed, Sue lost to Tom in the running race ||| because [Sue] had a bad start.', "Running at about the same speed, Sue lost to Tom in the running race ||| because [Tom] didn't have a bad start.", "Running at about the same speed, Sue lost to Tom in the running race ||| because [Sue] didn't have a good start.", "Running at about the same speed, Tom didn't beat Sue in the running race ||| because [Tom] didn't have a good start.", "Running at about the same speed, Tom didn't beat Sue in the running race ||| because [Sue] didn't have a bad start.", "Running at about the same speed, Tom didn't beat Sue in the running race ||| because [Tom] had a bad start.", "Running at about the same speed, Tom didn't beat Sue in the running race ||| because [Sue] had a good start.", "Running at about the same speed, Sue didn't lose to Tom in the running race ||| because [Tom] didn't have a good start.", "Running at about the same speed, Sue didn't lose to Tom in the running race ||| because [Sue] didn't have a bad start.", "Running at about the same speed, Sue didn't lose to Tom in the running race ||| because [Tom] had a bad start.", "Running at about the same speed, Sue didn't lose to Tom in the running race ||| because [Sue] had a good start."], [['Charles threw the schoolbag down to Emma ||| after [Charles] reached the top of the stairs.', 'Charles threw the schoolbag down to Linda ||| after [Charles] reached the top of the stairs.', 'Paul threw the schoolbag down to Emma ||| after [Paul] reached the top of the stairs.', 'Paul threw the schoolbag down to Linda ||| after [Paul] reached the top of the stairs.', 'Charles cast the schoolbag down to Emma ||| after [Charles] reached the top of the stairs.', 'Charles cast the schoolbag down to Linda ||| after [Charles] reached the top of the stairs.', 'Paul cast the schoolbag 
down to Emma ||| after [Paul] reached the top of the stairs.', 'Paul cast the schoolbag down to Linda ||| after [Paul] reached the top of the stairs.'], ['Charles threw the schoolbag down to Emma ||| after [Emma] reached the bottom of the stairs.', 'Charles threw the schoolbag down to Linda ||| after [Linda] reached the bottom of the stairs.', 'Paul threw the schoolbag down to Emma ||| after [Emma] reached the bottom of the stairs.', 'Paul threw the schoolbag down to Linda ||| after [Linda] reached the bottom of the stairs.', 'Charles cast the schoolbag down to Emma ||| after [Emma] reached the bottom of the stairs.', 'Charles cast the schoolbag down to Linda ||| after [Linda] reached the bottom of the stairs.', 'Paul cast the schoolbag down to Emma ||| after [Emma] reached the bottom of the stairs.', 'Paul cast the schoolbag down to Linda ||| after [Linda] reached the bottom of the stairs.'], ['Emma caught the schoolbag thrown down by Charles ||| after [Charles] reached the top of the stairs.', 'Linda caught the schoolbag thrown down by Charles ||| after [Charles] reached the top of the stairs.', 'Emma caught the schoolbag thrown down by Paul ||| after [Paul] reached the top of the stairs.', 'Linda caught the schoolbag thrown down by Paul ||| after [Paul] reached the top of the stairs.', 'Emma took the schoolbag thrown down by Charles ||| after [Charles] reached the top of the stairs.', 'Linda took the schoolbag thrown down by Charles ||| after [Charles] reached the top of the stairs.', 'Emma took the schoolbag thrown down by Paul ||| after [Paul] reached the top of the stairs.', 'Linda took the schoolbag thrown down by Paul ||| after [Paul] reached the top of the stairs.'], ['Emma caught the schoolbag thrown down by Charles ||| after [Emma] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown down by Charles ||| after [Linda] reached the bottom of the stairs.', 'Emma caught the schoolbag thrown down by Paul ||| after [Emma] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown down by Paul ||| after [Linda] reached the bottom of the stairs.', 'Emma took the schoolbag thrown down by Charles ||| after [Emma] reached the bottom of the stairs.', 'Linda took the schoolbag thrown down by Charles ||| after [Linda] reached the bottom of the stairs.', 'Emma took the schoolbag thrown down by Paul ||| after [Emma] reached the bottom of the stairs.', 'Linda took the schoolbag thrown down by Paul ||| after [Linda] reached the bottom of the stairs.'], ['Charles threw the schoolbag up to Emma ||| after [Charles] reached the bottom of the stairs.', 'Charles threw the schoolbag up to Linda ||| after [Charles] reached the bottom of the stairs.', 'Paul threw the schoolbag up to Emma ||| after [Paul] reached the bottom of the stairs.', 'Paul threw the schoolbag up to Linda ||| after [Paul] reached the bottom of the stairs.', 'Charles cast the schoolbag up to Emma ||| after [Charles] reached the bottom of the stairs.', 'Charles cast the schoolbag up to Linda ||| after [Charles] reached the bottom of the stairs.', 'Paul cast the schoolbag up to Emma ||| after [Paul] reached the bottom of the stairs.', 'Paul cast the schoolbag up to Linda ||| after [Paul] reached the bottom of the stairs.'], ['Charles threw the schoolbag up to Emma ||| after [Emma] reached the top of the stairs.', 'Charles threw the schoolbag up to Linda ||| after [Linda] reached the top of the stairs.', 'Paul threw the schoolbag up to Emma ||| after [Emma] reached the top of the stairs.', 'Paul threw the 
schoolbag up to Linda ||| after [Linda] reached the top of the stairs.', 'Charles cast the schoolbag up to Emma ||| after [Emma] reached the top of the stairs.', 'Charles cast the schoolbag up to Linda ||| after [Linda] reached the top of the stairs.', 'Paul cast the schoolbag up to Emma ||| after [Emma] reached the top of the stairs.', 'Paul cast the schoolbag up to Linda ||| after [Linda] reached the top of the stairs.'], ['Emma caught the schoolbag thrown up by Charles ||| after [Charles] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown up by Charles ||| after [Charles] reached the bottom of the stairs.', 'Emma caught the schoolbag thrown up by Paul ||| after [Paul] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown up by Paul ||| after [Paul] reached the bottom of the stairs.', 'Emma took the schoolbag thrown up by Charles ||| after [Charles] reached the bottom of the stairs.', 'Linda took the schoolbag thrown up by Charles ||| after [Charles] reached the bottom of the stairs.', 'Emma took the schoolbag thrown up by Paul ||| after [Paul] reached the bottom of the stairs.', 'Linda took the schoolbag thrown up by Paul ||| after [Paul] reached the bottom of the stairs.'], ['Emma caught the schoolbag thrown up by Charles ||| after [Emma] reached the top of the stairs.', 'Linda caught the schoolbag thrown up by Charles ||| after [Linda] reached the top of the stairs.', 'Emma caught the schoolbag thrown up by Paul ||| after [Emma] reached the top of the stairs.', 'Linda caught the schoolbag thrown up by Paul ||| after [Linda] reached the top of the stairs.', 'Emma took the schoolbag thrown up by Charles ||| after [Emma] reached the top of the stairs.', 'Linda took the schoolbag thrown up by Charles ||| after [Linda] reached the top of the stairs.', 'Emma took the schoolbag thrown up by Paul ||| after [Emma] reached the top of the stairs.', 'Linda took the schoolbag thrown up by Paul ||| after [Linda] reached the top of the stairs.'], ['Charles threw the schoolbag down to Emma ||| before [Charles] reached the bottom of the stairs.', 'Charles threw the schoolbag down to Linda ||| before [Charles] reached the bottom of the stairs.', 'Paul threw the schoolbag down to Emma ||| before [Paul] reached the bottom of the stairs.', 'Paul threw the schoolbag down to Linda ||| before [Paul] reached the bottom of the stairs.', 'Charles cast the schoolbag down to Emma ||| before [Charles] reached the bottom of the stairs.', 'Charles cast the schoolbag down to Linda ||| before [Charles] reached the bottom of the stairs.', 'Paul cast the schoolbag down to Emma ||| before [Paul] reached the bottom of the stairs.', 'Paul cast the schoolbag down to Linda ||| before [Paul] reached the bottom of the stairs.'], ['Charles threw the schoolbag down to Emma ||| before [Emma] reached the top of the stairs.', 'Charles threw the schoolbag down to Linda ||| before [Linda] reached the top of the stairs.', 'Paul threw the schoolbag down to Emma ||| before [Emma] reached the top of the stairs.', 'Paul threw the schoolbag down to Linda ||| before [Linda] reached the top of the stairs.', 'Charles cast the schoolbag down to Emma ||| before [Emma] reached the top of the stairs.', 'Charles cast the schoolbag down to Linda ||| before [Linda] reached the top of the stairs.', 'Paul cast the schoolbag down to Emma ||| before [Emma] reached the top of the stairs.', 'Paul cast the schoolbag down to Linda ||| before [Linda] reached the top of the stairs.'], ['Emma caught the schoolbag thrown 
down by Charles ||| before [Charles] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown down by Charles ||| before [Charles] reached the bottom of the stairs.', 'Emma caught the schoolbag thrown down by Paul ||| before [Paul] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown down by Paul ||| before [Paul] reached the bottom of the stairs.', 'Emma took the schoolbag thrown down by Charles ||| before [Charles] reached the bottom of the stairs.', 'Linda took the schoolbag thrown down by Charles ||| before [Charles] reached the bottom of the stairs.', 'Emma took the schoolbag thrown down by Paul ||| before [Paul] reached the bottom of the stairs.', 'Linda took the schoolbag thrown down by Paul ||| before [Paul] reached the bottom of the stairs.'], ['Emma caught the schoolbag thrown down by Charles ||| before [Emma] reached the top of the stairs.', 'Linda caught the schoolbag thrown down by Charles ||| before [Linda] reached the top of the stairs.', 'Emma caught the schoolbag thrown down by Paul ||| before [Emma] reached the top of the stairs.', 'Linda caught the schoolbag thrown down by Paul ||| before [Linda] reached the top of the stairs.', 'Emma took the schoolbag thrown down by Charles ||| before [Emma] reached the top of the stairs.', 'Linda took the schoolbag thrown down by Charles ||| before [Linda] reached the top of the stairs.', 'Emma took the schoolbag thrown down by Paul ||| before [Emma] reached the top of the stairs.', 'Linda took the schoolbag thrown down by Paul ||| before [Linda] reached the top of the stairs.'], ['Charles threw the schoolbag up to Emma ||| before [Charles] reached the top of the stairs.', 'Charles threw the schoolbag up to Linda ||| before [Charles] reached the top of the stairs.', 'Paul threw the schoolbag up to Emma ||| before [Paul] reached the top of the stairs.', 'Paul threw the schoolbag up to Linda ||| before [Paul] reached the top of the stairs.', 'Charles cast the schoolbag up to Emma ||| before [Charles] reached the top of the stairs.', 'Charles cast the schoolbag up to Linda ||| before [Charles] reached the top of the stairs.', 'Paul cast the schoolbag up to Emma ||| before [Paul] reached the top of the stairs.', 'Paul cast the schoolbag up to Linda ||| before [Paul] reached the top of the stairs.'], ['Charles threw the schoolbag up to Emma ||| before [Emma] reached the bottom of the stairs.', 'Charles threw the schoolbag up to Linda ||| before [Linda] reached the bottom of the stairs.', 'Paul threw the schoolbag up to Emma ||| before [Emma] reached the bottom of the stairs.', 'Paul threw the schoolbag up to Linda ||| before [Linda] reached the bottom of the stairs.', 'Charles cast the schoolbag up to Emma ||| before [Emma] reached the bottom of the stairs.', 'Charles cast the schoolbag up to Linda ||| before [Linda] reached the bottom of the stairs.', 'Paul cast the schoolbag up to Emma ||| before [Emma] reached the bottom of the stairs.', 'Paul cast the schoolbag up to Linda ||| before [Linda] reached the bottom of the stairs.'], ['Emma caught the schoolbag thrown up by Charles ||| before [Charles] reached the top of the stairs.', 'Linda caught the schoolbag thrown up by Charles ||| before [Charles] reached the top of the stairs.', 'Emma caught the schoolbag thrown up by Paul ||| before [Paul] reached the top of the stairs.', 'Linda caught the schoolbag thrown up by Paul ||| before [Paul] reached the top of the stairs.', 'Emma took the schoolbag thrown up by Charles ||| before [Charles] reached the top of 
the stairs.', 'Linda took the schoolbag thrown up by Charles ||| before [Charles] reached the top of the stairs.', 'Emma took the schoolbag thrown up by Paul ||| before [Paul] reached the top of the stairs.', 'Linda took the schoolbag thrown up by Paul ||| before [Paul] reached the top of the stairs.'], ['Emma caught the schoolbag thrown up by Charles ||| before [Emma] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown up by Charles ||| before [Linda] reached the bottom of the stairs.', 'Emma caught the schoolbag thrown up by Paul ||| before [Emma] reached the bottom of the stairs.', 'Linda caught the schoolbag thrown up by Paul ||| before [Linda] reached the bottom of the stairs.', 'Emma took the schoolbag thrown up by Charles ||| before [Emma] reached the bottom of the stairs.', 'Linda took the schoolbag thrown up by Charles ||| before [Linda] reached the bottom of the stairs.', 'Emma took the schoolbag thrown up by Paul ||| before [Emma] reached the bottom of the stairs.', 'Linda took the schoolbag thrown up by Paul ||| before [Linda] reached the bottom of the stairs.']], [["the ball doesn't fit into the bag ||| because the [ball] is large.", "the ball doesn't fit into the box ||| because the [ball] is large.", "the toy doesn't fit into the bag ||| because the [toy] is large.", "the toy doesn't fit into the box ||| because the [toy] is large.", "the ball can't be put into the bag ||| because the [ball] is large.", "the ball can't be put into the box ||| because the [ball] is large.", "the toy can't be put into the bag ||| because the [toy] is large.", "the toy can't be put into the box ||| because the [toy] is large."], ["the ball doesn't fit into the bag ||| because the [bag] is small.", "the ball doesn't fit into the box ||| because the [box] is small.", "the toy doesn't fit into the bag ||| because the [bag] is small.", "the toy doesn't fit into the box ||| because the [box] is small.", "the ball can't be put into the bag ||| because the [bag] is small.", "the ball can't be put into the box ||| because the [box] is small.", "the toy can't be put into the bag ||| because the [bag] is small.", "the toy can't be put into the box ||| because the [box] is small."], ["the ball doesn't fit into the bag ||| because the [ball] isn't small.", "the ball doesn't fit into the box ||| because the [ball] isn't small.", "the toy doesn't fit into the bag ||| because the [toy] isn't small.", "the toy doesn't fit into the box ||| because the [toy] isn't small.", "the ball can't be put into the bag ||| because the [ball] isn't small.", "the ball can't be put into the box ||| because the [ball] isn't small.", "the toy can't be put into the bag ||| because the [toy] isn't small.", "the toy can't be put into the box ||| because the [toy] isn't small."], ["the ball doesn't fit into the bag ||| because the [bag] isn't large.", "the ball doesn't fit into the box ||| because the [box] isn't large.", "the toy doesn't fit into the bag ||| because the [bag] isn't large.", "the toy doesn't fit into the box ||| because the [box] isn't large.", "the ball can't be put into the bag ||| because the [bag] isn't large.", "the ball can't be put into the box ||| because the [box] isn't large.", "the toy can't be put into the bag ||| because the [bag] isn't large.", "the toy can't be put into the box ||| because the [box] isn't large."], ["the bag doesn't hold the ball ||| because the [ball] is large.", "the box doesn't hold the ball ||| because the [ball] is large.", "the bag doesn't hold the toy ||| 
because the [toy] is large.", "the box doesn't hold the toy ||| because the [toy] is large.", "the bag doesn't have enough room for the ball ||| because the [ball] is large.", "the box doesn't have enough room for the ball ||| because the [ball] is large.", "the bag doesn't have enough room for the toy ||| because the [toy] is large.", "the box doesn't have enough room for the toy ||| because the [toy] is large."], ["the bag doesn't hold the ball ||| because the [bag] is small.", "the box doesn't hold the ball ||| because the [box] is small.", "the bag doesn't hold the toy ||| because the [bag] is small.", "the box doesn't hold the toy ||| because the [box] is small.", "the bag doesn't have enough room for the ball ||| because the [bag] is small.", "the box doesn't have enough room for the ball ||| because the [box] is small.", "the bag doesn't have enough room for the toy ||| because the [bag] is small.", "the box doesn't have enough room for the toy ||| because the [box] is small."], ["the bag doesn't hold the ball ||| because the [ball] isn't small.", "the box doesn't hold the ball ||| because the [ball] isn't small.", "the bag doesn't hold the toy ||| because the [toy] isn't small.", "the box doesn't hold the toy ||| because the [toy] isn't small.", "the bag doesn't have enough room for the ball ||| because the [ball] isn't small.", "the box doesn't have enough room for the ball ||| because the [ball] isn't small.", "the bag doesn't have enough room for the toy ||| because the [toy] isn't small.", "the box doesn't have enough room for the toy ||| because the [toy] isn't small."], ["the bag doesn't hold the ball ||| because the [bag] isn't large.", "the box doesn't hold the ball ||| because the [box] isn't large.", "the bag doesn't hold the toy ||| because the [bag] isn't large.", "the box doesn't hold the toy ||| because the [box] isn't large.", "the bag doesn't have enough room for the ball ||| because the [bag] isn't large.", "the box doesn't have enough room for the ball ||| because the [box] isn't large.", "the bag doesn't have enough room for the toy ||| because the [bag] isn't large.", "the box doesn't have enough room for the toy ||| because the [box] isn't large."], ["the ball can fit into the bag ||| because the [ball] isn't large.", "the ball can fit into the box ||| because the [ball] isn't large.", "the toy can fit into the bag ||| because the [toy] isn't large.", "the toy can fit into the box ||| because the [toy] isn't large.", "the ball can be put into the bag ||| because the [ball] isn't large.", "the ball can be put into the box ||| because the [ball] isn't large.", "the toy can be put into the bag ||| because the [toy] isn't large.", "the toy can be put into the box ||| because the [toy] isn't large."], ["the ball can fit into the bag ||| because the [bag] isn't small.", "the ball can fit into the box ||| because the [box] isn't small.", "the toy can fit into the bag ||| because the [bag] isn't small.", "the toy can fit into the box ||| because the [box] isn't small.", "the ball can be put into the bag ||| because the [bag] isn't small.", "the ball can be put into the box ||| because the [box] isn't small.", "the toy can be put into the bag ||| because the [bag] isn't small.", "the toy can be put into the box ||| because the [box] isn't small."], ['the ball can fit into the bag ||| because the [ball] is small.', 'the ball can fit into the box ||| because the [ball] is small.', 'the toy can fit into the bag ||| because the [toy] is small.', 'the toy can fit into the 
box ||| because the [toy] is small.', 'the ball can be put into the bag ||| because the [ball] is small.', 'the ball can be put into the box ||| because the [ball] is small.', 'the toy can be put into the bag ||| because the [toy] is small.', 'the toy can be put into the box ||| because the [toy] is small.'], ['the ball can fit into the bag ||| because the [bag] is large.', 'the ball can fit into the box ||| because the [box] is large.', 'the toy can fit into the bag ||| because the [bag] is large.', 'the toy can fit into the box ||| because the [box] is large.', 'the ball can be put into the bag ||| because the [bag] is large.', 'the ball can be put into the box ||| because the [box] is large.', 'the toy can be put into the bag ||| because the [bag] is large.', 'the toy can be put into the box ||| because the [box] is large.'], ["the bag can hold the ball ||| because the [ball] isn't large.", "the box can hold the ball ||| because the [ball] isn't large.", "the bag can hold the toy ||| because the [toy] isn't large.", "the box can hold the toy ||| because the [toy] isn't large.", "the bag has enough room for the ball ||| because the [ball] isn't large.", "the box has enough room for the ball ||| because the [ball] isn't large.", "the bag has enough room for the toy ||| because the [toy] isn't large.", "the box has enough room for the toy ||| because the [toy] isn't large."], ["the bag can hold the ball ||| because the [bag] isn't small.", "the box can hold the ball ||| because the [box] isn't small.", "the bag can hold the toy ||| because the [bag] isn't small.", "the box can hold the toy ||| because the [box] isn't small.", "the bag has enough room for the ball ||| because the [bag] isn't small.", "the box has enough room for the ball ||| because the [box] isn't small.", "the bag has enough room for the toy ||| because the [bag] isn't small.", "the box has enough room for the toy ||| because the [box] isn't small."], ['the bag can hold the ball ||| because the [ball] is small.', 'the box can hold the ball ||| because the [ball] is small.', 'the bag can hold the toy ||| because the [toy] is small.', 'the box can hold the toy ||| because the [toy] is small.', 'the bag has enough room for the ball ||| because the [ball] is small.', 'the box has enough room for the ball ||| because the [ball] is small.', 'the bag has enough room for the toy ||| because the [toy] is small.', 'the box has enough room for the toy ||| because the [toy] is small.'], ['the bag can hold the ball ||| because the [bag] is large.', 'the box can hold the ball ||| because the [box] is large.', 'the bag can hold the toy ||| because the [bag] is large.', 'the box can hold the toy ||| because the [box] is large.', 'the bag has enough room for the ball ||| because the [bag] is large.', 'the box has enough room for the ball ||| because the [box] is large.', 'the bag has enough room for the toy ||| because the [bag] is large.', 'the box has enough room for the toy ||| because the [box] is large.'], ["the ball doesn't fit into the bag ||| although the [ball] isn't large.", "the ball doesn't fit into the box ||| although the [ball] isn't large.", "the toy doesn't fit into the bag ||| although the [toy] isn't large.", "the toy doesn't fit into the box ||| although the [toy] isn't large.", "the ball can't be put into the bag ||| although the [ball] isn't large.", "the ball can't be put into the box ||| although the [ball] isn't large.", "the toy can't be put into the bag ||| although the [toy] isn't large.", "the toy can't be put 
into the box ||| although the [toy] isn't large."], ["the ball doesn't fit into the bag ||| although the [bag] isn't small.", "the ball doesn't fit into the box ||| although the [box] isn't small.", "the toy doesn't fit into the bag ||| although the [bag] isn't small.", "the toy doesn't fit into the box ||| although the [box] isn't small.", "the ball can't be put into the bag ||| although the [bag] isn't small.", "the ball can't be put into the box ||| although the [box] isn't small.", "the toy can't be put into the bag ||| although the [bag] isn't small.", "the toy can't be put into the box ||| although the [box] isn't small."], ["the ball doesn't fit into the bag ||| although the [ball] is small.", "the ball doesn't fit into the box ||| although the [ball] is small.", "the toy doesn't fit into the bag ||| although the [toy] is small.", "the toy doesn't fit into the box ||| although the [toy] is small.", "the ball can't be put into the bag ||| although the [ball] is small.", "the ball can't be put into the box ||| although the [ball] is small.", "the toy can't be put into the bag ||| although the [toy] is small.", "the toy can't be put into the box ||| although the [toy] is small."], ["the ball doesn't fit into the bag ||| although the [bag] is large.", "the ball doesn't fit into the box ||| although the [box] is large.", "the toy doesn't fit into the bag ||| although the [bag] is large.", "the toy doesn't fit into the box ||| although the [box] is large.", "the ball can't be put into the bag ||| although the [bag] is large.", "the ball can't be put into the box ||| although the [box] is large.", "the toy can't be put into the bag ||| although the [bag] is large.", "the toy can't be put into the box ||| although the [box] is large."], ["the bag doesn't hold the ball ||| although the [ball] isn't large.", "the box doesn't hold the ball ||| although the [ball] isn't large.", "the bag doesn't hold the toy ||| although the [toy] isn't large.", "the box doesn't hold the toy ||| although the [toy] isn't large.", "the bag doesn't have enough room for the ball ||| although the [ball] isn't large.", "the box doesn't have enough room for the ball ||| although the [ball] isn't large.", "the bag doesn't have enough room for the toy ||| although the [toy] isn't large.", "the box doesn't have enough room for the toy ||| although the [toy] isn't large."], ["the bag doesn't hold the ball ||| although the [bag] isn't small.", "the box doesn't hold the ball ||| although the [box] isn't small.", "the bag doesn't hold the toy ||| although the [bag] isn't small.", "the box doesn't hold the toy ||| although the [box] isn't small.", "the bag doesn't have enough room for the ball ||| although the [bag] isn't small.", "the box doesn't have enough room for the ball ||| although the [box] isn't small.", "the bag doesn't have enough room for the toy ||| although the [bag] isn't small.", "the box doesn't have enough room for the toy ||| although the [box] isn't small."], ["the bag doesn't hold the ball ||| although the [ball] is small.", "the box doesn't hold the ball ||| although the [ball] is small.", "the bag doesn't hold the toy ||| although the [toy] is small.", "the box doesn't hold the toy ||| although the [toy] is small.", "the bag doesn't have enough room for the ball ||| although the [ball] is small.", "the box doesn't have enough room for the ball ||| although the [ball] is small.", "the bag doesn't have enough room for the toy ||| although the [toy] is small.", "the box doesn't have enough room for the 
toy ||| although the [toy] is small."], ["the bag doesn't hold the ball ||| although the [bag] is large.", "the box doesn't hold the ball ||| although the [box] is large.", "the bag doesn't hold the toy ||| although the [bag] is large.", "the box doesn't hold the toy ||| although the [box] is large.", "the bag doesn't have enough room for the ball ||| although the [bag] is large.", "the box doesn't have enough room for the ball ||| although the [box] is large.", "the bag doesn't have enough room for the toy ||| although the [bag] is large.", "the box doesn't have enough room for the toy ||| although the [box] is large."], ['the ball can fit into the bag ||| although the [ball] is large.', 'the ball can fit into the box ||| although the [ball] is large.', 'the toy can fit into the bag ||| although the [toy] is large.', 'the toy can fit into the box ||| although the [toy] is large.', 'the ball can be put into the bag ||| although the [ball] is large.', 'the ball can be put into the box ||| although the [ball] is large.', 'the toy can be put into the bag ||| although the [toy] is large.', 'the toy can be put into the box ||| although the [toy] is large.'], ['the ball can fit into the bag ||| although the [bag] is small.', 'the ball can fit into the box ||| although the [box] is small.', 'the toy can fit into the bag ||| although the [bag] is small.', 'the toy can fit into the box ||| although the [box] is small.', 'the ball can be put into the bag ||| although the [bag] is small.', 'the ball can be put into the box ||| although the [box] is small.', 'the toy can be put into the bag ||| although the [bag] is small.', 'the toy can be put into the box ||| although the [box] is small.'], ["the ball can fit into the bag ||| although the [ball] isn't small.", "the ball can fit into the box ||| although the [ball] isn't small.", "the toy can fit into the bag ||| although the [toy] isn't small.", "the toy can fit into the box ||| although the [toy] isn't small.", "the ball can be put into the bag ||| although the [ball] isn't small.", "the ball can be put into the box ||| although the [ball] isn't small.", "the toy can be put into the bag ||| although the [toy] isn't small.", "the toy can be put into the box ||| although the [toy] isn't small."], ["the ball can fit into the bag ||| although the [bag] isn't large.", "the ball can fit into the box ||| although the [box] isn't large.", "the toy can fit into the bag ||| although the [bag] isn't large.", "the toy can fit into the box ||| although the [box] isn't large.", "the ball can be put into the bag ||| although the [bag] isn't large.", "the ball can be put into the box ||| although the [box] isn't large.", "the toy can be put into the bag ||| although the [bag] isn't large.", "the toy can be put into the box ||| although the [box] isn't large."], ['the bag can hold the ball ||| although the [ball] is large.', 'the box can hold the ball ||| although the [ball] is large.', 'the bag can hold the toy ||| although the [toy] is large.', 'the box can hold the toy ||| although the [toy] is large.', 'the bag has enough room for the ball ||| although the [ball] is large.', 'the box has enough room for the ball ||| although the [ball] is large.', 'the bag has enough room for the toy ||| although the [toy] is large.', 'the box has enough room for the toy ||| although the [toy] is large.'], ['the bag can hold the ball ||| although the [bag] is small.', 'the box can hold the ball ||| although the [box] is small.', 'the bag can hold the toy ||| although the [bag] 
is small.', 'the box can hold the toy ||| although the [box] is small.', 'the bag has enough room for the ball ||| although the [bag] is small.', 'the box has enough room for the ball ||| although the [box] is small.', 'the bag has enough room for the toy ||| although the [bag] is small.', 'the box has enough room for the toy ||| although the [box] is small.'], ["the bag can hold the ball ||| although the [ball] isn't small.", "the box can hold the ball ||| although the [ball] isn't small.", "the bag can hold the toy ||| although the [toy] isn't small.", "the box can hold the toy ||| although the [toy] isn't small.", "the bag has enough room for the ball ||| although the [ball] isn't small.", "the box has enough room for the ball ||| although the [ball] isn't small.", "the bag has enough room for the toy ||| although the [toy] isn't small.", "the box has enough room for the toy ||| although the [toy] isn't small."], ["the bag can hold the ball ||| although the [bag] isn't large.", "the box can hold the ball ||| although the [box] isn't large.", "the bag can hold the toy ||| although the [bag] isn't large.", "the box can hold the toy ||| although the [box] isn't large.", "the bag has enough room for the ball ||| although the [bag] isn't large.", "the box has enough room for the ball ||| although the [box] isn't large.", "the bag has enough room for the toy ||| although the [bag] isn't large.", "the box has enough room for the toy ||| although the [box] isn't large."]], [('the ball crashed right through the board ||| because the [ball] was hard.', 'the ball penetrated through the board ||| because the [ball] was hard.'), ('the ball crashed right through the board ||| because the [board] was soft.', 'the ball penetrated through the board ||| because the [board] was soft.'), ("the ball crashed right through the board ||| because the [ball] wasn't soft.", "the ball penetrated through the board ||| because the [ball] wasn't soft."), ("the ball crashed right through the board ||| because the [board] wasn't hard.", "the ball penetrated through the board ||| because the [board] wasn't hard."), ('the board failed to block the ball ||| because the [ball] was hard.', 'the board failed to stop the ball ||| because the [ball] was hard.'), ('the board failed to block the ball ||| because the [board] was soft.', 'the board failed to stop the ball ||| because the [board] was soft.'), ("the board failed to block the ball ||| because the [ball] wasn't soft.", "the board failed to stop the ball ||| because the [ball] wasn't soft."), ("the board failed to block the ball ||| because the [board] wasn't hard.", "the board failed to stop the ball ||| because the [board] wasn't hard."), ("the ball didn't crash through the board ||| because the [ball] wasn't hard.", "the ball didn't penetrate through the board ||| because the [ball] wasn't hard."), ("the ball didn't crash through the board ||| because the [board] wasn't soft.", "the ball didn't penetrate through the board ||| because the [board] wasn't soft."), ("the ball didn't crash through the board ||| because the [ball] was soft.", "the ball didn't penetrate through the board ||| because the [ball] was soft."), ("the ball didn't crash through the board ||| because the [board] was hard.", "the ball didn't penetrate through the board ||| because the [board] was hard."), ("the board blocked the ball ||| because the [ball] wasn't hard.", "the board stopped the ball ||| because the [ball] wasn't hard."), ("the board blocked the ball ||| because the [board] wasn't 
soft.", "the board stopped the ball ||| because the [board] wasn't soft."), ('the board blocked the ball ||| because the [ball] was soft.', 'the board stopped the ball ||| because the [ball] was soft.'), ('the board blocked the ball ||| because the [board] was hard.', 'the board stopped the ball ||| because the [board] was hard.'), ("the ball crashed right through the board ||| although the [ball] wasn't hard.", "the ball penetrated through the board ||| although the [ball] wasn't hard."), ("the ball crashed right through the board ||| although the [board] wasn't soft.", "the ball penetrated through the board ||| although the [board] wasn't soft."), ('the ball crashed right through the board ||| although the [ball] was soft.', 'the ball penetrated through the board ||| although the [ball] was soft.'), ('the ball crashed right through the board ||| although the [board] was hard.', 'the ball penetrated through the board ||| although the [board] was hard.'), ("the board failed to block the ball ||| although the [ball] wasn't hard.", "the board failed to stop the ball ||| although the [ball] wasn't hard."), ("the board failed to block the ball ||| although the [board] wasn't soft.", "the board failed to stop the ball ||| although the [board] wasn't soft."), ('the board failed to block the ball ||| although the [ball] was soft.', 'the board failed to stop the ball ||| although the [ball] was soft.'), ('the board failed to block the ball ||| although the [board] was hard.', 'the board failed to stop the ball ||| although the [board] was hard.'), ("the ball didn't crash through the board ||| although the [ball] was hard.", "the ball didn't penetrate through the board ||| although the [ball] was hard."), ("the ball didn't crash through the board ||| although the [board] was soft.", "the ball didn't penetrate through the board ||| although the [board] was soft."), ("the ball didn't crash through the board ||| although the [ball] wasn't soft.", "the ball didn't penetrate through the board ||| although the [ball] wasn't soft."), ("the ball didn't crash through the board ||| although the [board] wasn't hard.", "the ball didn't penetrate through the board ||| although the [board] wasn't hard."), ('the board blocked the ball ||| although the [ball] was hard.', 'the board stopped the ball ||| although the [ball] was hard.'), ('the board blocked the ball ||| although the [board] was soft.', 'the board stopped the ball ||| although the [board] was soft.'), ("the board blocked the ball ||| although the [ball] wasn't soft.", "the board stopped the ball ||| although the [ball] wasn't soft."), ("the board blocked the ball ||| although the [board] wasn't hard.", "the board stopped the ball ||| although the [board] wasn't hard.")], ['Steve follows Lucy in everything ||| because [Steve] is bad at making decisions.', 'Steve follows Lucy in everything ||| because [Lucy] is good at making decisions.', "Steve follows Lucy in everything ||| because [Steve] isn't good at making decisions.", "Steve follows Lucy in everything ||| because [Lucy] isn't bad at making decisions.", 'Lucy is followed by Steve in everything ||| because [Steve] is bad at making decisions.', 'Lucy is followed by Steve in everything ||| because [Lucy] is good at making decisions.', "Lucy is followed by Steve in everything ||| because [Steve] isn't good at making decisions.", "Lucy is followed by Steve in everything ||| because [Lucy] isn't bad at making decisions.", "Steve doesn't follow Lucy in everything ||| because [Steve] isn't bad at making 
decisions.", "Steve doesn't follow Lucy in everything ||| because [Lucy] isn't good at making decisions.", "Steve doesn't follow Lucy in everything ||| because [Steve] is good at making decisions.", "Steve doesn't follow Lucy in everything ||| because [Lucy] is bad at making decisions.", "Lucy isn't followed by Steve in everything ||| because [Steve] isn't bad at making decisions.", "Lucy isn't followed by Steve in everything ||| because [Lucy] isn't good at making decisions.", "Lucy isn't followed by Steve in everything ||| because [Steve] is good at making decisions.", "Lucy isn't followed by Steve in everything ||| because [Lucy] is bad at making decisions."], [['the skirt is traded by Grace for the hat ||| because she thinks the [skirt] looks bad.', 'the skirt is traded by Grace for the short ||| because she thinks the [skirt] looks bad.', 'the cap is traded by Grace for the hat ||| because she thinks the [cap] looks bad.', 'the cap is traded by Grace for the short ||| because she thinks the [cap] looks bad.', 'the skirt is replaced by Grace with the hat ||| because she thinks the [skirt] looks bad.', 'the skirt is replaced by Grace with the short ||| because she thinks the [skirt] looks bad.', 'the cap is replaced by Grace with the hat ||| because she thinks the [cap] looks bad.', 'the cap is replaced by Grace with the short ||| because she thinks the [cap] looks bad.'], ['the skirt is traded by Grace for the hat ||| because she thinks the [hat] looks good.', 'the skirt is traded by Grace for the short ||| because she thinks the [short] looks good.', 'the cap is traded by Grace for the hat ||| because she thinks the [hat] looks good.', 'the cap is traded by Grace for the short ||| because she thinks the [short] looks good.', 'the skirt is replaced by Grace with the hat ||| because she thinks the [hat] looks good.', 'the skirt is replaced by Grace with the short ||| because she thinks the [short] looks good.', 'the cap is replaced by Grace with the hat ||| because she thinks the [hat] looks good.', 'the cap is replaced by Grace with the short ||| because she thinks the [short] looks good.'], ['the skirt is traded by Grace for the hat ||| because she thinks the [skirt] looks not good.', 'the skirt is traded by Grace for the short ||| because she thinks the [skirt] looks not good.', 'the cap is traded by Grace for the hat ||| because she thinks the [cap] looks not good.', 'the cap is traded by Grace for the short ||| because she thinks the [cap] looks not good.', 'the skirt is replaced by Grace with the hat ||| because she thinks the [skirt] looks not good.', 'the skirt is replaced by Grace with the short ||| because she thinks the [skirt] looks not good.', 'the cap is replaced by Grace with the hat ||| because she thinks the [cap] looks not good.', 'the cap is replaced by Grace with the short ||| because she thinks the [cap] looks not good.'], ['the skirt is traded by Grace for the hat ||| because she thinks the [hat] looks not bad.', 'the skirt is traded by Grace for the short ||| because she thinks the [short] looks not bad.', 'the cap is traded by Grace for the hat ||| because she thinks the [hat] looks not bad.', 'the cap is traded by Grace for the short ||| because she thinks the [short] looks not bad.', 'the skirt is replaced by Grace with the hat ||| because she thinks the [hat] looks not bad.', 'the skirt is replaced by Grace with the short ||| because she thinks the [short] looks not bad.', 'the cap is replaced by Grace with the hat ||| because she thinks the [hat] looks not 
bad.', 'the cap is replaced by Grace with the short ||| because she thinks the [short] looks not bad.'], ['the hat is substituted by Grace for the skirt ||| because she thinks the [skirt] looks bad.', 'the short is substituted by Grace for the skirt ||| because she thinks the [skirt] looks bad.', 'the hat is substituted by Grace for the cap ||| because she thinks the [cap] looks bad.', 'the short is substituted by Grace for the cap ||| because she thinks the [cap] looks bad.', 'the hat is preferred by Grace to the skirt ||| because she thinks the [skirt] looks bad.', 'the short is preferred by Grace to the skirt ||| because she thinks the [skirt] looks bad.', 'the hat is preferred by Grace to the cap ||| because she thinks the [cap] looks bad.', 'the short is preferred by Grace to the cap ||| because she thinks the [cap] looks bad.'], ['the hat is substituted by Grace for the skirt ||| because she thinks the [hat] looks good.', 'the short is substituted by Grace for the skirt ||| because she thinks the [short] looks good.', 'the hat is substituted by Grace for the cap ||| because she thinks the [hat] looks good.', 'the short is substituted by Grace for the cap ||| because she thinks the [short] looks good.', 'the hat is preferred by Grace to the skirt ||| because she thinks the [hat] looks good.', 'the short is preferred by Grace to the skirt ||| because she thinks the [short] looks good.', 'the hat is preferred by Grace to the cap ||| because she thinks the [hat] looks good.', 'the short is preferred by Grace to the cap ||| because she thinks the [short] looks good.'], ['the hat is substituted by Grace for the skirt ||| because she thinks the [skirt] looks not good.', 'the short is substituted by Grace for the skirt ||| because she thinks the [skirt] looks not good.', 'the hat is substituted by Grace for the cap ||| because she thinks the [cap] looks not good.', 'the short is substituted by Grace for the cap ||| because she thinks the [cap] looks not good.', 'the hat is preferred by Grace to the skirt ||| because she thinks the [skirt] looks not good.', 'the short is preferred by Grace to the skirt ||| because she thinks the [skirt] looks not good.', 'the hat is preferred by Grace to the cap ||| because she thinks the [cap] looks not good.', 'the short is preferred by Grace to the cap ||| because she thinks the [cap] looks not good.'], ['the hat is substituted by Grace for the skirt ||| because she thinks the [hat] looks not bad.', 'the short is substituted by Grace for the skirt ||| because she thinks the [short] looks not bad.', 'the hat is substituted by Grace for the cap ||| because she thinks the [hat] looks not bad.', 'the short is substituted by Grace for the cap ||| because she thinks the [short] looks not bad.', 'the hat is preferred by Grace to the skirt ||| because she thinks the [hat] looks not bad.', 'the short is preferred by Grace to the skirt ||| because she thinks the [short] looks not bad.', 'the hat is preferred by Grace to the cap ||| because she thinks the [hat] looks not bad.', 'the short is preferred by Grace to the cap ||| because she thinks the [short] looks not bad.'], ["the skirt isn't traded by Grace for the hat ||| because she thinks the [skirt] looks not bad.", "the skirt isn't traded by Grace for the short ||| because she thinks the [skirt] looks not bad.", "the cap isn't traded by Grace for the hat ||| because she thinks the [cap] looks not bad.", "the cap isn't traded by Grace for the short ||| because she thinks the [cap] looks not bad.", "the skirt isn't 
replaced by Grace with the hat ||| because she thinks the [skirt] looks not bad.", "the skirt isn't replaced by Grace with the short ||| because she thinks the [skirt] looks not bad.", "the cap isn't replaced by Grace with the hat ||| because she thinks the [cap] looks not bad.", "the cap isn't replaced by Grace with the short ||| because she thinks the [cap] looks not bad."], ["the skirt isn't traded by Grace for the hat ||| because she thinks the [hat] looks not good.", "the skirt isn't traded by Grace for the short ||| because she thinks the [short] looks not good.", "the cap isn't traded by Grace for the hat ||| because she thinks the [hat] looks not good.", "the cap isn't traded by Grace for the short ||| because she thinks the [short] looks not good.", "the skirt isn't replaced by Grace with the hat ||| because she thinks the [hat] looks not good.", "the skirt isn't replaced by Grace with the short ||| because she thinks the [short] looks not good.", "the cap isn't replaced by Grace with the hat ||| because she thinks the [hat] looks not good.", "the cap isn't replaced by Grace with the short ||| because she thinks the [short] looks not good."], ["the skirt isn't traded by Grace for the hat ||| because she thinks the [skirt] looks good.", "the skirt isn't traded by Grace for the short ||| because she thinks the [skirt] looks good.", "the cap isn't traded by Grace for the hat ||| because she thinks the [cap] looks good.", "the cap isn't traded by Grace for the short ||| because she thinks the [cap] looks good.", "the skirt isn't replaced by Grace with the hat ||| because she thinks the [skirt] looks good.", "the skirt isn't replaced by Grace with the short ||| because she thinks the [skirt] looks good.", "the cap isn't replaced by Grace with the hat ||| because she thinks the [cap] looks good.", "the cap isn't replaced by Grace with the short ||| because she thinks the [cap] looks good."], ["the skirt isn't traded by Grace for the hat ||| because she thinks the [hat] looks bad.", "the skirt isn't traded by Grace for the short ||| because she thinks the [short] looks bad.", "the cap isn't traded by Grace for the hat ||| because she thinks the [hat] looks bad.", "the cap isn't traded by Grace for the short ||| because she thinks the [short] looks bad.", "the skirt isn't replaced by Grace with the hat ||| because she thinks the [hat] looks bad.", "the skirt isn't replaced by Grace with the short ||| because she thinks the [short] looks bad.", "the cap isn't replaced by Grace with the hat ||| because she thinks the [hat] looks bad.", "the cap isn't replaced by Grace with the short ||| because she thinks the [short] looks bad."], ["the hat isn't substituted by Grace for the skirt ||| because she thinks the [skirt] looks not bad.", "the short isn't substituted by Grace for the skirt ||| because she thinks the [skirt] looks not bad.", "the hat isn't substituted by Grace for the cap ||| because she thinks the [cap] looks not bad.", "the short isn't substituted by Grace for the cap ||| because she thinks the [cap] looks not bad.", "the hat isn't preferred by Grace to the skirt ||| because she thinks the [skirt] looks not bad.", "the short isn't preferred by Grace to the skirt ||| because she thinks the [skirt] looks not bad.", "the hat isn't preferred by Grace to the cap ||| because she thinks the [cap] looks not bad.", "the short isn't preferred by Grace to the cap ||| because she thinks the [cap] looks not bad."], ["the hat isn't substituted by Grace for the skirt ||| because she thinks the 
[hat] looks not good.", "the short isn't substituted by Grace for the skirt ||| because she thinks the [short] looks not good.", "the hat isn't substituted by Grace for the cap ||| because she thinks the [hat] looks not good.", "the short isn't substituted by Grace for the cap ||| because she thinks the [short] looks not good.", "the hat isn't preferred by Grace to the skirt ||| because she thinks the [hat] looks not good.", "the short isn't preferred by Grace to the skirt ||| because she thinks the [short] looks not good.", "the hat isn't preferred by Grace to the cap ||| because she thinks the [hat] looks not good.", "the short isn't preferred by Grace to the cap ||| because she thinks the [short] looks not good."], ["the hat isn't substituted by Grace for the skirt ||| because she thinks the [skirt] looks good.", "the short isn't substituted by Grace for the skirt ||| because she thinks the [skirt] looks good.", "the hat isn't substituted by Grace for the cap ||| because she thinks the [cap] looks good.", "the short isn't substituted by Grace for the cap ||| because she thinks the [cap] looks good.", "the hat isn't preferred by Grace to the skirt ||| because she thinks the [skirt] looks good.", "the short isn't preferred by Grace to the skirt ||| because she thinks the [skirt] looks good.", "the hat isn't preferred by Grace to the cap ||| because she thinks the [cap] looks good.", "the short isn't preferred by Grace to the cap ||| because she thinks the [cap] looks good."], ["the hat isn't substituted by Grace for the skirt ||| because she thinks the [hat] looks bad.", "the short isn't substituted by Grace for the skirt ||| because she thinks the [short] looks bad.", "the hat isn't substituted by Grace for the cap ||| because she thinks the [hat] looks bad.", "the short isn't substituted by Grace for the cap ||| because she thinks the [short] looks bad.", "the hat isn't preferred by Grace to the skirt ||| because she thinks the [hat] looks bad.", "the short isn't preferred by Grace to the skirt ||| because she thinks the [short] looks bad.", "the hat isn't preferred by Grace to the cap ||| because she thinks the [hat] looks bad.", "the short isn't preferred by Grace to the cap ||| because she thinks the [short] looks bad."], ['the skirt is traded by Grace for the hat ||| although she thinks the [skirt] looks not bad.', 'the skirt is traded by Grace for the short ||| although she thinks the [skirt] looks not bad.', 'the cap is traded by Grace for the hat ||| although she thinks the [cap] looks not bad.', 'the cap is traded by Grace for the short ||| although she thinks the [cap] looks not bad.', 'the skirt is replaced by Grace with the hat ||| although she thinks the [skirt] looks not bad.', 'the skirt is replaced by Grace with the short ||| although she thinks the [skirt] looks not bad.', 'the cap is replaced by Grace with the hat ||| although she thinks the [cap] looks not bad.', 'the cap is replaced by Grace with the short ||| although she thinks the [cap] looks not bad.'], ['the skirt is traded by Grace for the hat ||| although she thinks the [hat] looks not good.', 'the skirt is traded by Grace for the short ||| although she thinks the [short] looks not good.', 'the cap is traded by Grace for the hat ||| although she thinks the [hat] looks not good.', 'the cap is traded by Grace for the short ||| although she thinks the [short] looks not good.', 'the skirt is replaced by Grace with the hat ||| although she thinks the [hat] looks not good.', 'the skirt is replaced by Grace with the 
short ||| although she thinks the [short] looks not good.', 'the cap is replaced by Grace with the hat ||| although she thinks the [hat] looks not good.', 'the cap is replaced by Grace with the short ||| although she thinks the [short] looks not good.'], ['the skirt is traded by Grace for the hat ||| although she thinks the [skirt] looks good.', 'the skirt is traded by Grace for the short ||| although she thinks the [skirt] looks good.', 'the cap is traded by Grace for the hat ||| although she thinks the [cap] looks good.', 'the cap is traded by Grace for the short ||| although she thinks the [cap] looks good.', 'the skirt is replaced by Grace with the hat ||| although she thinks the [skirt] looks good.', 'the skirt is replaced by Grace with the short ||| although she thinks the [skirt] looks good.', 'the cap is replaced by Grace with the hat ||| although she thinks the [cap] looks good.', 'the cap is replaced by Grace with the short ||| although she thinks the [cap] looks good.'], ['the skirt is traded by Grace for the hat ||| although she thinks the [hat] looks bad.', 'the skirt is traded by Grace for the short ||| although she thinks the [short] looks bad.', 'the cap is traded by Grace for the hat ||| although she thinks the [hat] looks bad.', 'the cap is traded by Grace for the short ||| although she thinks the [short] looks bad.', 'the skirt is replaced by Grace with the hat ||| although she thinks the [hat] looks bad.', 'the skirt is replaced by Grace with the short ||| although she thinks the [short] looks bad.', 'the cap is replaced by Grace with the hat ||| although she thinks the [hat] looks bad.', 'the cap is replaced by Grace with the short ||| although she thinks the [short] looks bad.'], ['the hat is substituted by Grace for the skirt ||| although she thinks the [skirt] looks not bad.', 'the short is substituted by Grace for the skirt ||| although she thinks the [skirt] looks not bad.', 'the hat is substituted by Grace for the cap ||| although she thinks the [cap] looks not bad.', 'the short is substituted by Grace for the cap ||| although she thinks the [cap] looks not bad.', 'the hat is preferred by Grace to the skirt ||| although she thinks the [skirt] looks not bad.', 'the short is preferred by Grace to the skirt ||| although she thinks the [skirt] looks not bad.', 'the hat is preferred by Grace to the cap ||| although she thinks the [cap] looks not bad.', 'the short is preferred by Grace to the cap ||| although she thinks the [cap] looks not bad.'], ['the hat is substituted by Grace for the skirt ||| although she thinks the [hat] looks not good.', 'the short is substituted by Grace for the skirt ||| although she thinks the [short] looks not good.', 'the hat is substituted by Grace for the cap ||| although she thinks the [hat] looks not good.', 'the short is substituted by Grace for the cap ||| although she thinks the [short] looks not good.', 'the hat is preferred by Grace to the skirt ||| although she thinks the [hat] looks not good.', 'the short is preferred by Grace to the skirt ||| although she thinks the [short] looks not good.', 'the hat is preferred by Grace to the cap ||| although she thinks the [hat] looks not good.', 'the short is preferred by Grace to the cap ||| although she thinks the [short] looks not good.'], ['the hat is substituted by Grace for the skirt ||| although she thinks the [skirt] looks good.', 'the short is substituted by Grace for the skirt ||| although she thinks the [skirt] looks good.', 'the hat is substituted by Grace for the cap ||| 
although she thinks the [cap] looks good.', 'the short is substituted by Grace for the cap ||| although she thinks the [cap] looks good.', 'the hat is preferred by Grace to the skirt ||| although she thinks the [skirt] looks good.', 'the short is preferred by Grace to the skirt ||| although she thinks the [skirt] looks good.', 'the hat is preferred by Grace to the cap ||| although she thinks the [cap] looks good.', 'the short is preferred by Grace to the cap ||| although she thinks the [cap] looks good.'], ['the hat is substituted by Grace for the skirt ||| although she thinks the [hat] looks bad.', 'the short is substituted by Grace for the skirt ||| although she thinks the [short] looks bad.', 'the hat is substituted by Grace for the cap ||| although she thinks the [hat] looks bad.', 'the short is substituted by Grace for the cap ||| although she thinks the [short] looks bad.', 'the hat is preferred by Grace to the skirt ||| although she thinks the [hat] looks bad.', 'the short is preferred by Grace to the skirt ||| although she thinks the [short] looks bad.', 'the hat is preferred by Grace to the cap ||| although she thinks the [hat] looks bad.', 'the short is preferred by Grace to the cap ||| although she thinks the [short] looks bad.'], ["the skirt isn't traded by Grace for the hat ||| although she thinks the [skirt] looks bad.", "the skirt isn't traded by Grace for the short ||| although she thinks the [skirt] looks bad.", "the cap isn't traded by Grace for the hat ||| although she thinks the [cap] looks bad.", "the cap isn't traded by Grace for the short ||| although she thinks the [cap] looks bad.", "the skirt isn't replaced by Grace with the hat ||| although she thinks the [skirt] looks bad.", "the skirt isn't replaced by Grace with the short ||| although she thinks the [skirt] looks bad.", "the cap isn't replaced by Grace with the hat ||| although she thinks the [cap] looks bad.", "the cap isn't replaced by Grace with the short ||| although she thinks the [cap] looks bad."], ["the skirt isn't traded by Grace for the hat ||| although she thinks the [hat] looks good.", "the skirt isn't traded by Grace for the short ||| although she thinks the [short] looks good.", "the cap isn't traded by Grace for the hat ||| although she thinks the [hat] looks good.", "the cap isn't traded by Grace for the short ||| although she thinks the [short] looks good.", "the skirt isn't replaced by Grace with the hat ||| although she thinks the [hat] looks good.", "the skirt isn't replaced by Grace with the short ||| although she thinks the [short] looks good.", "the cap isn't replaced by Grace with the hat ||| although she thinks the [hat] looks good.", "the cap isn't replaced by Grace with the short ||| although she thinks the [short] looks good."], ["the skirt isn't traded by Grace for the hat ||| although she thinks the [skirt] looks not good.", "the skirt isn't traded by Grace for the short ||| although she thinks the [skirt] looks not good.", "the cap isn't traded by Grace for the hat ||| although she thinks the [cap] looks not good.", "the cap isn't traded by Grace for the short ||| although she thinks the [cap] looks not good.", "the skirt isn't replaced by Grace with the hat ||| although she thinks the [skirt] looks not good.", "the skirt isn't replaced by Grace with the short ||| although she thinks the [skirt] looks not good.", "the cap isn't replaced by Grace with the hat ||| although she thinks the [cap] looks not good.", "the cap isn't replaced by Grace with the short ||| although she thinks 
the [cap] looks not good."], ["the skirt isn't traded by Grace for the hat ||| although she thinks the [hat] looks not bad.", "the skirt isn't traded by Grace for the short ||| although she thinks the [short] looks not bad.", "the cap isn't traded by Grace for the hat ||| although she thinks the [hat] looks not bad.", "the cap isn't traded by Grace for the short ||| although she thinks the [short] looks not bad.", "the skirt isn't replaced by Grace with the hat ||| although she thinks the [hat] looks not bad.", "the skirt isn't replaced by Grace with the short ||| although she thinks the [short] looks not bad.", "the cap isn't replaced by Grace with the hat ||| although she thinks the [hat] looks not bad.", "the cap isn't replaced by Grace with the short ||| although she thinks the [short] looks not bad."], ["the hat isn't substituted by Grace for the skirt ||| although she thinks the [skirt] looks bad.", "the short isn't substituted by Grace for the skirt ||| although she thinks the [skirt] looks bad.", "the hat isn't substituted by Grace for the cap ||| although she thinks the [cap] looks bad.", "the short isn't substituted by Grace for the cap ||| although she thinks the [cap] looks bad.", "the hat isn't preferred by Grace to the skirt ||| although she thinks the [skirt] looks bad.", "the short isn't preferred by Grace to the skirt ||| although she thinks the [skirt] looks bad.", "the hat isn't preferred by Grace to the cap ||| although she thinks the [cap] looks bad.", "the short isn't preferred by Grace to the cap ||| although she thinks the [cap] looks bad."], ["the hat isn't substituted by Grace for the skirt ||| although she thinks the [hat] looks good.", "the short isn't substituted by Grace for the skirt ||| although she thinks the [short] looks good.", "the hat isn't substituted by Grace for the cap ||| although she thinks the [hat] looks good.", "the short isn't substituted by Grace for the cap ||| although she thinks the [short] looks good.", "the hat isn't preferred by Grace to the skirt ||| although she thinks the [hat] looks good.", "the short isn't preferred by Grace to the skirt ||| although she thinks the [short] looks good.", "the hat isn't preferred by Grace to the cap ||| although she thinks the [hat] looks good.", "the short isn't preferred by Grace to the cap ||| although she thinks the [short] looks good."], ["the hat isn't substituted by Grace for the skirt ||| although she thinks the [skirt] looks not good.", "the short isn't substituted by Grace for the skirt ||| although she thinks the [skirt] looks not good.", "the hat isn't substituted by Grace for the cap ||| although she thinks the [cap] looks not good.", "the short isn't substituted by Grace for the cap ||| although she thinks the [cap] looks not good.", "the hat isn't preferred by Grace to the skirt ||| although she thinks the [skirt] looks not good.", "the short isn't preferred by Grace to the skirt ||| although she thinks the [skirt] looks not good.", "the hat isn't preferred by Grace to the cap ||| although she thinks the [cap] looks not good.", "the short isn't preferred by Grace to the cap ||| although she thinks the [cap] looks not good."], ["the hat isn't substituted by Grace for the skirt ||| although she thinks the [hat] looks not bad.", "the short isn't substituted by Grace for the skirt ||| although she thinks the [short] looks not bad.", "the hat isn't substituted by Grace for the cap ||| although she thinks the [hat] looks not bad.", "the short isn't substituted by Grace for the cap ||| 
although she thinks the [short] looks not bad.", "the hat isn't preferred by Grace to the skirt ||| although she thinks the [hat] looks not bad.", "the short isn't preferred by Grace to the skirt ||| although she thinks the [short] looks not bad.", "the hat isn't preferred by Grace to the cap ||| although she thinks the [hat] looks not bad.", "the short isn't preferred by Grace to the cap ||| although she thinks the [short] looks not bad."]], [('Sam succeeded in fooling Emma ||| so [Sam] got a lot of money.', 'Sam succeeded in cheating Emma ||| so [Sam] got a lot of money.'), ('Sam succeeded in fooling Emma ||| so [Emma] lost a lot of money.', 'Sam succeeded in cheating Emma ||| so [Emma] lost a lot of money.'), ('Emma was fooled by Sam ||| so [Sam] got a lot of money.', 'Emma was cheated by Sam ||| so [Sam] got a lot of money.'), ('Emma was fooled by Sam ||| so [Emma] lost a lot of money.', 'Emma was cheated by Sam ||| so [Emma] lost a lot of money.'), ("Sam failed to fool Emma ||| so [Sam] didn't get a lot of money.", "Sam failed to cheat Emma ||| so [Sam] didn't get a lot of money."), ("Sam failed to fool Emma ||| so [Emma] didn't lose a lot of money.", "Sam failed to cheat Emma ||| so [Emma] didn't lose a lot of money."), ("Emma wasn't fooled by Sam ||| so [Sam] didn't get a lot of money.", "Emma wasn't cheated by Sam ||| so [Sam] didn't get a lot of money."), ("Emma wasn't fooled by Sam ||| so [Emma] didn't lose a lot of money.", "Emma wasn't cheated by Sam ||| so [Emma] didn't lose a lot of money."), ("Sam succeeded in fooling Emma ||| but [Sam] didn't get a lot of money.", "Sam succeeded in cheating Emma ||| but [Sam] didn't get a lot of money."), ("Sam succeeded in fooling Emma ||| but [Emma] didn't lose a lot of money.", "Sam succeeded in cheating Emma ||| but [Emma] didn't lose a lot of money."), ("Emma was fooled by Sam ||| but [Sam] didn't get a lot of money.", "Emma was cheated by Sam ||| but [Sam] didn't get a lot of money."), ("Emma was fooled by Sam ||| but [Emma] didn't lose a lot of money.", "Emma was cheated by Sam ||| but [Emma] didn't lose a lot of money."), ('Sam failed to fool Emma ||| but [Sam] got a lot of money.', 'Sam failed to cheat Emma ||| but [Sam] got a lot of money.'), ('Sam failed to fool Emma ||| but [Emma] lost a lot of money.', 'Sam failed to cheat Emma ||| but [Emma] lost a lot of money.'), ("Emma wasn't fooled by Sam ||| but [Sam] got a lot of money.", "Emma wasn't cheated by Sam ||| but [Sam] got a lot of money."), ("Emma wasn't fooled by Sam ||| but [Emma] lost a lot of money.", "Emma wasn't cheated by Sam ||| but [Emma] lost a lot of money.")], ["John thanked Susan ||| although [John] hadn't received a lot of help.", "John thanked Susan ||| although [Susan] hadn't given a lot of help.", "Susan took good care of John ||| although [John] hadn't received a lot of help.", "Susan took good care of John ||| although [Susan] hadn't given a lot of help.", "John didn't thank Susan ||| although [John] had received a lot of help.", "John didn't thank Susan ||| although [Susan] had given a lot of help.", "Susan didn't take good care of John ||| although [John] had received a lot of help.", "Susan didn't take good care of John ||| although [Susan] had given a lot of help."], [['the cups could be placed on all the tables ||| because there were many of the [cups].', 'the cups could be placed on all the benches ||| because there were many of the [cups].', 'the pictures could be placed on all the tables ||| because there were many of the [pictures].', 'the pictures could be 
placed on all the benches ||| because there were many of the [pictures].', 'the cups could be put on all the tables ||| because there were many of the [cups].', 'the cups could be put on all the benches ||| because there were many of the [cups].', 'the pictures could be put on all the tables ||| because there were many of the [pictures].', 'the pictures could be put on all the benches ||| because there were many of the [pictures].'], ['the cups could be placed on all the tables ||| because there were few of the [tables].', 'the cups could be placed on all the benches ||| because there were few of the [benches].', 'the pictures could be placed on all the tables ||| because there were few of the [tables].', 'the pictures could be placed on all the benches ||| because there were few of the [benches].', 'the cups could be put on all the tables ||| because there were few of the [tables].', 'the cups could be put on all the benches ||| because there were few of the [benches].', 'the pictures could be put on all the tables ||| because there were few of the [tables].', 'the pictures could be put on all the benches ||| because there were few of the [benches].'], ['the cups could be placed on all the tables ||| because there were not few of the [cups].', 'the cups could be placed on all the benches ||| because there were not few of the [cups].', 'the pictures could be placed on all the tables ||| because there were not few of the [pictures].', 'the pictures could be placed on all the benches ||| because there were not few of the [pictures].', 'the cups could be put on all the tables ||| because there were not few of the [cups].', 'the cups could be put on all the benches ||| because there were not few of the [cups].', 'the pictures could be put on all the tables ||| because there were not few of the [pictures].', 'the pictures could be put on all the benches ||| because there were not few of the [pictures].'], ['the cups could be placed on all the tables ||| because there were not many of the [tables].', 'the cups could be placed on all the benches ||| because there were not many of the [benches].', 'the pictures could be placed on all the tables ||| because there were not many of the [tables].', 'the pictures could be placed on all the benches ||| because there were not many of the [benches].', 'the cups could be put on all the tables ||| because there were not many of the [tables].', 'the cups could be put on all the benches ||| because there were not many of the [benches].', 'the pictures could be put on all the tables ||| because there were not many of the [tables].', 'the pictures could be put on all the benches ||| because there were not many of the [benches].'], ['the tables could all be covered by the cups ||| because there were many of the [cups].', 'the benches could all be covered by the cups ||| because there were many of the [cups].', 'the tables could all be covered by the pictures ||| because there were many of the [pictures].', 'the benches could all be covered by the pictures ||| because there were many of the [pictures].', 'the tables could carry all the cups ||| because there were many of the [cups].', 'the benches could carry all the cups ||| because there were many of the [cups].', 'the tables could carry all the pictures ||| because there were many of the [pictures].', 'the benches could carry all the pictures ||| because there were many of the [pictures].'], ['the tables could all be covered by the cups ||| because there were few of the [tables].', 'the benches could all be 
covered by the cups ||| because there were few of the [benches].', 'the tables could all be covered by the pictures ||| because there were few of the [tables].', 'the benches could all be covered by the pictures ||| because there were few of the [benches].', 'the tables could carry all the cups ||| because there were few of the [tables].', 'the benches could carry all the cups ||| because there were few of the [benches].', 'the tables could carry all the pictures ||| because there were few of the [tables].', 'the benches could carry all the pictures ||| because there were few of the [benches].'], ['the tables could all be covered by the cups ||| because there were not few of the [cups].', 'the benches could all be covered by the cups ||| because there were not few of the [cups].', 'the tables could all be covered by the pictures ||| because there were not few of the [pictures].', 'the benches could all be covered by the pictures ||| because there were not few of the [pictures].', 'the tables could carry all the cups ||| because there were not few of the [cups].', 'the benches could carry all the cups ||| because there were not few of the [cups].', 'the tables could carry all the pictures ||| because there were not few of the [pictures].', 'the benches could carry all the pictures ||| because there were not few of the [pictures].'], ['the tables could all be covered by the cups ||| because there were not many of the [tables].', 'the benches could all be covered by the cups ||| because there were not many of the [benches].', 'the tables could all be covered by the pictures ||| because there were not many of the [tables].', 'the benches could all be covered by the pictures ||| because there were not many of the [benches].', 'the tables could carry all the cups ||| because there were not many of the [tables].', 'the benches could carry all the cups ||| because there were not many of the [benches].', 'the tables could carry all the pictures ||| because there were not many of the [tables].', 'the benches could carry all the pictures ||| because there were not many of the [benches].'], ["the cups couldn't be placed on all the tables ||| because there were not many of the [cups].", "the cups couldn't be placed on all the benches ||| because there were not many of the [cups].", "the pictures couldn't be placed on all the tables ||| because there were not many of the [pictures].", "the pictures couldn't be placed on all the benches ||| because there were not many of the [pictures].", "the cups couldn't be put on all the tables ||| because there were not many of the [cups].", "the cups couldn't be put on all the benches ||| because there were not many of the [cups].", "the pictures couldn't be put on all the tables ||| because there were not many of the [pictures].", "the pictures couldn't be put on all the benches ||| because there were not many of the [pictures]."], ["the cups couldn't be placed on all the tables ||| because there were not few of the [tables].", "the cups couldn't be placed on all the benches ||| because there were not few of the [benches].", "the pictures couldn't be placed on all the tables ||| because there were not few of the [tables].", "the pictures couldn't be placed on all the benches ||| because there were not few of the [benches].", "the cups couldn't be put on all the tables ||| because there were not few of the [tables].", "the cups couldn't be put on all the benches ||| because there were not few of the [benches].", "the pictures couldn't be put on all the tables ||| 
because there were not few of the [tables].", "the pictures couldn't be put on all the benches ||| because there were not few of the [benches]."], ["the cups couldn't be placed on all the tables ||| because there were few of the [cups].", "the cups couldn't be placed on all the benches ||| because there were few of the [cups].", "the pictures couldn't be placed on all the tables ||| because there were few of the [pictures].", "the pictures couldn't be placed on all the benches ||| because there were few of the [pictures].", "the cups couldn't be put on all the tables ||| because there were few of the [cups].", "the cups couldn't be put on all the benches ||| because there were few of the [cups].", "the pictures couldn't be put on all the tables ||| because there were few of the [pictures].", "the pictures couldn't be put on all the benches ||| because there were few of the [pictures]."], ["the cups couldn't be placed on all the tables ||| because there were many of the [tables].", "the cups couldn't be placed on all the benches ||| because there were many of the [benches].", "the pictures couldn't be placed on all the tables ||| because there were many of the [tables].", "the pictures couldn't be placed on all the benches ||| because there were many of the [benches].", "the cups couldn't be put on all the tables ||| because there were many of the [tables].", "the cups couldn't be put on all the benches ||| because there were many of the [benches].", "the pictures couldn't be put on all the tables ||| because there were many of the [tables].", "the pictures couldn't be put on all the benches ||| because there were many of the [benches]."], ["the tables couldn't all be covered by the cups ||| because there were not many of the [cups].", "the benches couldn't all be covered by the cups ||| because there were not many of the [cups].", "the tables couldn't all be covered by the pictures ||| because there were not many of the [pictures].", "the benches couldn't all be covered by the pictures ||| because there were not many of the [pictures].", "the tables couldn't carry all the cups ||| because there were not many of the [cups].", "the benches couldn't carry all the cups ||| because there were not many of the [cups].", "the tables couldn't carry all the pictures ||| because there were not many of the [pictures].", "the benches couldn't carry all the pictures ||| because there were not many of the [pictures]."], ["the tables couldn't all be covered by the cups ||| because there were not few of the [tables].", "the benches couldn't all be covered by the cups ||| because there were not few of the [benches].", "the tables couldn't all be covered by the pictures ||| because there were not few of the [tables].", "the benches couldn't all be covered by the pictures ||| because there were not few of the [benches].", "the tables couldn't carry all the cups ||| because there were not few of the [tables].", "the benches couldn't carry all the cups ||| because there were not few of the [benches].", "the tables couldn't carry all the pictures ||| because there were not few of the [tables].", "the benches couldn't carry all the pictures ||| because there were not few of the [benches]."], ["the tables couldn't all be covered by the cups ||| because there were few of the [cups].", "the benches couldn't all be covered by the cups ||| because there were few of the [cups].", "the tables couldn't all be covered by the pictures ||| because there were few of the [pictures].", "the benches couldn't all be covered by 
the pictures ||| because there were few of the [pictures].", "the tables couldn't carry all the cups ||| because there were few of the [cups].", "the benches couldn't carry all the cups ||| because there were few of the [cups].", "the tables couldn't carry all the pictures ||| because there were few of the [pictures].", "the benches couldn't carry all the pictures ||| because there were few of the [pictures]."], ["the tables couldn't all be covered by the cups ||| because there were many of the [tables].", "the benches couldn't all be covered by the cups ||| because there were many of the [benches].", "the tables couldn't all be covered by the pictures ||| because there were many of the [tables].", "the benches couldn't all be covered by the pictures ||| because there were many of the [benches].", "the tables couldn't carry all the cups ||| because there were many of the [tables].", "the benches couldn't carry all the cups ||| because there were many of the [benches].", "the tables couldn't carry all the pictures ||| because there were many of the [tables].", "the benches couldn't carry all the pictures ||| because there were many of the [benches]."], ['the cups could be placed on all the tables ||| although there were not many of the [cups].', 'the cups could be placed on all the benches ||| although there were not many of the [cups].', 'the pictures could be placed on all the tables ||| although there were not many of the [pictures].', 'the pictures could be placed on all the benches ||| although there were not many of the [pictures].', 'the cups could be put on all the tables ||| although there were not many of the [cups].', 'the cups could be put on all the benches ||| although there were not many of the [cups].', 'the pictures could be put on all the tables ||| although there were not many of the [pictures].', 'the pictures could be put on all the benches ||| although there were not many of the [pictures].'], ['the cups could be placed on all the tables ||| although there were not few of the [tables].', 'the cups could be placed on all the benches ||| although there were not few of the [benches].', 'the pictures could be placed on all the tables ||| although there were not few of the [tables].', 'the pictures could be placed on all the benches ||| although there were not few of the [benches].', 'the cups could be put on all the tables ||| although there were not few of the [tables].', 'the cups could be put on all the benches ||| although there were not few of the [benches].', 'the pictures could be put on all the tables ||| although there were not few of the [tables].', 'the pictures could be put on all the benches ||| although there were not few of the [benches].'], ['the cups could be placed on all the tables ||| although there were few of the [cups].', 'the cups could be placed on all the benches ||| although there were few of the [cups].', 'the pictures could be placed on all the tables ||| although there were few of the [pictures].', 'the pictures could be placed on all the benches ||| although there were few of the [pictures].', 'the cups could be put on all the tables ||| although there were few of the [cups].', 'the cups could be put on all the benches ||| although there were few of the [cups].', 'the pictures could be put on all the tables ||| although there were few of the [pictures].', 'the pictures could be put on all the benches ||| although there were few of the [pictures].'], ['the cups could be placed on all the tables ||| although there were many of the [tables].', 
'the cups could be placed on all the benches ||| although there were many of the [benches].', 'the pictures could be placed on all the tables ||| although there were many of the [tables].', 'the pictures could be placed on all the benches ||| although there were many of the [benches].', 'the cups could be put on all the tables ||| although there were many of the [tables].', 'the cups could be put on all the benches ||| although there were many of the [benches].', 'the pictures could be put on all the tables ||| although there were many of the [tables].', 'the pictures could be put on all the benches ||| although there were many of the [benches].'], ['the tables could all be covered by the cups ||| although there were not many of the [cups].', 'the benches could all be covered by the cups ||| although there were not many of the [cups].', 'the tables could all be covered by the pictures ||| although there were not many of the [pictures].', 'the benches could all be covered by the pictures ||| although there were not many of the [pictures].', 'the tables could carry all the cups ||| although there were not many of the [cups].', 'the benches could carry all the cups ||| although there were not many of the [cups].', 'the tables could carry all the pictures ||| although there were not many of the [pictures].', 'the benches could carry all the pictures ||| although there were not many of the [pictures].'], ['the tables could all be covered by the cups ||| although there were not few of the [tables].', 'the benches could all be covered by the cups ||| although there were not few of the [benches].', 'the tables could all be covered by the pictures ||| although there were not few of the [tables].', 'the benches could all be covered by the pictures ||| although there were not few of the [benches].', 'the tables could carry all the cups ||| although there were not few of the [tables].', 'the benches could carry all the cups ||| although there were not few of the [benches].', 'the tables could carry all the pictures ||| although there were not few of the [tables].', 'the benches could carry all the pictures ||| although there were not few of the [benches].'], ['the tables could all be covered by the cups ||| although there were few of the [cups].', 'the benches could all be covered by the cups ||| although there were few of the [cups].', 'the tables could all be covered by the pictures ||| although there were few of the [pictures].', 'the benches could all be covered by the pictures ||| although there were few of the [pictures].', 'the tables could carry all the cups ||| although there were few of the [cups].', 'the benches could carry all the cups ||| although there were few of the [cups].', 'the tables could carry all the pictures ||| although there were few of the [pictures].', 'the benches could carry all the pictures ||| although there were few of the [pictures].'], ['the tables could all be covered by the cups ||| although there were many of the [tables].', 'the benches could all be covered by the cups ||| although there were many of the [benches].', 'the tables could all be covered by the pictures ||| although there were many of the [tables].', 'the benches could all be covered by the pictures ||| although there were many of the [benches].', 'the tables could carry all the cups ||| although there were many of the [tables].', 'the benches could carry all the cups ||| although there were many of the [benches].', 'the tables could carry all the pictures ||| although there were many of the 
[tables].', 'the benches could carry all the pictures ||| although there were many of the [benches].'], ["the cups couldn't be placed on all the tables ||| although there were many of the [cups].", "the cups couldn't be placed on all the benches ||| although there were many of the [cups].", "the pictures couldn't be placed on all the tables ||| although there were many of the [pictures].", "the pictures couldn't be placed on all the benches ||| although there were many of the [pictures].", "the cups couldn't be put on all the tables ||| although there were many of the [cups].", "the cups couldn't be put on all the benches ||| although there were many of the [cups].", "the pictures couldn't be put on all the tables ||| although there were many of the [pictures].", "the pictures couldn't be put on all the benches ||| although there were many of the [pictures]."], ["the cups couldn't be placed on all the tables ||| although there were few of the [tables].", "the cups couldn't be placed on all the benches ||| although there were few of the [benches].", "the pictures couldn't be placed on all the tables ||| although there were few of the [tables].", "the pictures couldn't be placed on all the benches ||| although there were few of the [benches].", "the cups couldn't be put on all the tables ||| although there were few of the [tables].", "the cups couldn't be put on all the benches ||| although there were few of the [benches].", "the pictures couldn't be put on all the tables ||| although there were few of the [tables].", "the pictures couldn't be put on all the benches ||| although there were few of the [benches]."], ["the cups couldn't be placed on all the tables ||| although there were not few of the [cups].", "the cups couldn't be placed on all the benches ||| although there were not few of the [cups].", "the pictures couldn't be placed on all the tables ||| although there were not few of the [pictures].", "the pictures couldn't be placed on all the benches ||| although there were not few of the [pictures].", "the cups couldn't be put on all the tables ||| although there were not few of the [cups].", "the cups couldn't be put on all the benches ||| although there were not few of the [cups].", "the pictures couldn't be put on all the tables ||| although there were not few of the [pictures].", "the pictures couldn't be put on all the benches ||| although there were not few of the [pictures]."], ["the cups couldn't be placed on all the tables ||| although there were not many of the [tables].", "the cups couldn't be placed on all the benches ||| although there were not many of the [benches].", "the pictures couldn't be placed on all the tables ||| although there were not many of the [tables].", "the pictures couldn't be placed on all the benches ||| although there were not many of the [benches].", "the cups couldn't be put on all the tables ||| although there were not many of the [tables].", "the cups couldn't be put on all the benches ||| although there were not many of the [benches].", "the pictures couldn't be put on all the tables ||| although there were not many of the [tables].", "the pictures couldn't be put on all the benches ||| although there were not many of the [benches]."], ["the tables couldn't all be covered by the cups ||| although there were many of the [cups].", "the benches couldn't all be covered by the cups ||| although there were many of the [cups].", "the tables couldn't all be covered by the pictures ||| although there were many of the [pictures].", "the benches couldn't 
all be covered by the pictures ||| although there were many of the [pictures].", "the tables couldn't carry all the cups ||| although there were many of the [cups].", "the benches couldn't carry all the cups ||| although there were many of the [cups].", "the tables couldn't carry all the pictures ||| although there were many of the [pictures].", "the benches couldn't carry all the pictures ||| although there were many of the [pictures]."], ["the tables couldn't all be covered by the cups ||| although there were few of the [tables].", "the benches couldn't all be covered by the cups ||| although there were few of the [benches].", "the tables couldn't all be covered by the pictures ||| although there were few of the [tables].", "the benches couldn't all be covered by the pictures ||| although there were few of the [benches].", "the tables couldn't carry all the cups ||| although there were few of the [tables].", "the benches couldn't carry all the cups ||| although there were few of the [benches].", "the tables couldn't carry all the pictures ||| although there were few of the [tables].", "the benches couldn't carry all the pictures ||| although there were few of the [benches]."], ["the tables couldn't all be covered by the cups ||| although there were not few of the [cups].", "the benches couldn't all be covered by the cups ||| although there were not few of the [cups].", "the tables couldn't all be covered by the pictures ||| although there were not few of the [pictures].", "the benches couldn't all be covered by the pictures ||| although there were not few of the [pictures].", "the tables couldn't carry all the cups ||| although there were not few of the [cups].", "the benches couldn't carry all the cups ||| although there were not few of the [cups].", "the tables couldn't carry all the pictures ||| although there were not few of the [pictures].", "the benches couldn't carry all the pictures ||| although there were not few of the [pictures]."], ["the tables couldn't all be covered by the cups ||| although there were not many of the [tables].", "the benches couldn't all be covered by the cups ||| although there were not many of the [benches].", "the tables couldn't all be covered by the pictures ||| although there were not many of the [tables].", "the benches couldn't all be covered by the pictures ||| although there were not many of the [benches].", "the tables couldn't carry all the cups ||| although there were not many of the [tables].", "the benches couldn't carry all the cups ||| although there were not many of the [benches].", "the tables couldn't carry all the pictures ||| although there were not many of the [tables].", "the benches couldn't carry all the pictures ||| although there were not many of the [benches]."]], ['Germany defeated Italy ||| because [Germany] was more powerful.', 'Germany defeated Italy ||| because [Italy] was less powerful.', "Germany defeated Italy ||| because [Germany] wasn't less powerful.", "Germany defeated Italy ||| because [Italy] wasn't more powerful.", 'Italy was defeated by Germany ||| because [Germany] was more powerful.', 'Italy was defeated by Germany ||| because [Italy] was less powerful.', "Italy was defeated by Germany ||| because [Germany] wasn't less powerful.", "Italy was defeated by Germany ||| because [Italy] wasn't more powerful.", "Germany didn't defeat Italy ||| because [Germany] wasn't more powerful.", "Germany didn't defeat Italy ||| because [Italy] wasn't less powerful.", "Germany didn't defeat Italy ||| because [Germany] was less 
powerful.", "Germany didn't defeat Italy ||| because [Italy] was more powerful.", "Italy wasn't defeated by Germany ||| because [Germany] wasn't more powerful.", "Italy wasn't defeated by Germany ||| because [Italy] wasn't less powerful.", "Italy wasn't defeated by Germany ||| because [Germany] was less powerful.", "Italy wasn't defeated by Germany ||| because [Italy] was more powerful."], ['James ceded the presidency to Amy ||| because [James] was notorious.', 'James ceded the presidency to Amy ||| because [Amy] was popular.', "James ceded the presidency to Amy ||| because [James] wasn't popular.", 'James ceded the presidency to Amy ||| because [Amy] was not notorious.', 'Amy took over the presidency from James ||| because [James] was notorious.', 'Amy took over the presidency from James ||| because [Amy] was popular.', "Amy took over the presidency from James ||| because [James] wasn't popular.", 'Amy took over the presidency from James ||| because [Amy] was not notorious.', "James didn't cede the presidency to Amy ||| because [James] was not notorious.", "James didn't cede the presidency to Amy ||| because [Amy] wasn't popular.", "James didn't cede the presidency to Amy ||| because [James] was popular.", "James didn't cede the presidency to Amy ||| because [Amy] was notorious.", "Amy didn't take over the presidency from James ||| because [James] was not notorious.", "Amy didn't take over the presidency from James ||| because [Amy] wasn't popular.", "Amy didn't take over the presidency from James ||| because [James] was popular.", "Amy didn't take over the presidency from James ||| because [Amy] was notorious."], [('James ceded the presidency to Amy ||| because [James] was notorious.', 'James gave the presidency to Amy ||| because [James] was notorious.'), ('James ceded the presidency to Amy ||| because [Amy] was popular.', 'James gave the presidency to Amy ||| because [Amy] was popular.'), ("James ceded the presidency to Amy ||| because [James] wasn't popular.", "James gave the presidency to Amy ||| because [James] wasn't popular."), ('James ceded the presidency to Amy ||| because [Amy] was not notorious.', 'James gave the presidency to Amy ||| because [Amy] was not notorious.'), ('Amy took over the presidency from James ||| because [James] was notorious.', 'Amy got the presidency from James ||| because [James] was notorious.'), ('Amy took over the presidency from James ||| because [Amy] was popular.', 'Amy got the presidency from James ||| because [Amy] was popular.'), ("Amy took over the presidency from James ||| because [James] wasn't popular.", "Amy got the presidency from James ||| because [James] wasn't popular."), ('Amy took over the presidency from James ||| because [Amy] was not notorious.', 'Amy got the presidency from James ||| because [Amy] was not notorious.'), ("James didn't cede the presidency to Amy ||| because [James] was not notorious.", "James didn't give the presidency to Amy ||| because [James] was not notorious."), ("James didn't cede the presidency to Amy ||| because [Amy] wasn't popular.", "James didn't give the presidency to Amy ||| because [Amy] wasn't popular."), ("James didn't cede the presidency to Amy ||| because [James] was popular.", "James didn't give the presidency to Amy ||| because [James] was popular."), ("James didn't cede the presidency to Amy ||| because [Amy] was notorious.", "James didn't give the presidency to Amy ||| because [Amy] was notorious."), ("Amy didn't take over the presidency from James ||| because [James] was not notorious.", "Amy 
didn't get the presidency from James ||| because [James] was not notorious."), ("Amy didn't take over the presidency from James ||| because [Amy] wasn't popular.", "Amy didn't get the presidency from James ||| because [Amy] wasn't popular."), ("Amy didn't take over the presidency from James ||| because [James] was popular.", "Amy didn't get the presidency from James ||| because [James] was popular."), ("Amy didn't take over the presidency from James ||| because [Amy] was notorious.", "Amy didn't get the presidency from James ||| because [Amy] was notorious."), ('James ceded the presidency to Amy ||| although [James] was not notorious.', 'James gave the presidency to Amy ||| although [James] was not notorious.'), ("James ceded the presidency to Amy ||| although [Amy] wasn't popular.", "James gave the presidency to Amy ||| although [Amy] wasn't popular."), ('James ceded the presidency to Amy ||| although [James] was popular.', 'James gave the presidency to Amy ||| although [James] was popular.'), ('James ceded the presidency to Amy ||| although [Amy] was notorious.', 'James gave the presidency to Amy ||| although [Amy] was notorious.'), ('Amy took over the presidency from James ||| although [James] was not notorious.', 'Amy got the presidency from James ||| although [James] was not notorious.'), ("Amy took over the presidency from James ||| although [Amy] wasn't popular.", "Amy got the presidency from James ||| although [Amy] wasn't popular."), ('Amy took over the presidency from James ||| although [James] was popular.', 'Amy got the presidency from James ||| although [James] was popular.'), ('Amy took over the presidency from James ||| although [Amy] was notorious.', 'Amy got the presidency from James ||| although [Amy] was notorious.'), ("James didn't cede the presidency to Amy ||| although [James] was notorious.", "James didn't give the presidency to Amy ||| although [James] was notorious."), ("James didn't cede the presidency to Amy ||| although [Amy] was popular.", "James didn't give the presidency to Amy ||| although [Amy] was popular."), ("James didn't cede the presidency to Amy ||| although [James] wasn't popular.", "James didn't give the presidency to Amy ||| although [James] wasn't popular."), ("James didn't cede the presidency to Amy ||| although [Amy] was not notorious.", "James didn't give the presidency to Amy ||| although [Amy] was not notorious."), ("Amy didn't take over the presidency from James ||| although [James] was notorious.", "Amy didn't get the presidency from James ||| although [James] was notorious."), ("Amy didn't take over the presidency from James ||| although [Amy] was popular.", "Amy didn't get the presidency from James ||| although [Amy] was popular."), ("Amy didn't take over the presidency from James ||| although [James] wasn't popular.", "Amy didn't get the presidency from James ||| although [James] wasn't popular."), ("Amy didn't take over the presidency from James ||| although [Amy] was not notorious.", "Amy didn't get the presidency from James ||| although [Amy] was not notorious.")], [['the apples are more popular than the grapes ||| so the [apples] should be made more next time.', 'the apples are more popular than the sandwiches ||| so the [apples] should be made more next time.', 'the bananas are more popular than the grapes ||| so the [bananas] should be made more next time.', 'the bananas are more popular than the sandwiches ||| so the [bananas] should be made more next time.', 'the apples are sold more than the grapes ||| so the [apples] should be 
made more next time.', 'the apples are sold more than the sandwiches ||| so the [apples] should be made more next time.', 'the bananas are sold more than the grapes ||| so the [bananas] should be made more next time.', 'the bananas are sold more than the sandwiches ||| so the [bananas] should be made more next time.'], ['the apples are more popular than the grapes ||| so the [grapes] should be made less next time.', 'the apples are more popular than the sandwiches ||| so the [sandwiches] should be made less next time.', 'the bananas are more popular than the grapes ||| so the [grapes] should be made less next time.', 'the bananas are more popular than the sandwiches ||| so the [sandwiches] should be made less next time.', 'the apples are sold more than the grapes ||| so the [grapes] should be made less next time.', 'the apples are sold more than the sandwiches ||| so the [sandwiches] should be made less next time.', 'the bananas are sold more than the grapes ||| so the [grapes] should be made less next time.', 'the bananas are sold more than the sandwiches ||| so the [sandwiches] should be made less next time.'], ["the apples are more popular than the grapes ||| so the [apples] shouldn't be made less next time.", "the apples are more popular than the sandwiches ||| so the [apples] shouldn't be made less next time.", "the bananas are more popular than the grapes ||| so the [bananas] shouldn't be made less next time.", "the bananas are more popular than the sandwiches ||| so the [bananas] shouldn't be made less next time.", "the apples are sold more than the grapes ||| so the [apples] shouldn't be made less next time.", "the apples are sold more than the sandwiches ||| so the [apples] shouldn't be made less next time.", "the bananas are sold more than the grapes ||| so the [bananas] shouldn't be made less next time.", "the bananas are sold more than the sandwiches ||| so the [bananas] shouldn't be made less next time."], ["the apples are more popular than the grapes ||| so the [grapes] shouldn't be made more next time.", "the apples are more popular than the sandwiches ||| so the [sandwiches] shouldn't be made more next time.", "the bananas are more popular than the grapes ||| so the [grapes] shouldn't be made more next time.", "the bananas are more popular than the sandwiches ||| so the [sandwiches] shouldn't be made more next time.", "the apples are sold more than the grapes ||| so the [grapes] shouldn't be made more next time.", "the apples are sold more than the sandwiches ||| so the [sandwiches] shouldn't be made more next time.", "the bananas are sold more than the grapes ||| so the [grapes] shouldn't be made more next time.", "the bananas are sold more than the sandwiches ||| so the [sandwiches] shouldn't be made more next time."], ['the grapes lose to the apples ||| so the [apples] should be made more next time.', 'the sandwiches lose to the apples ||| so the [apples] should be made more next time.', 'the grapes lose to the bananas ||| so the [bananas] should be made more next time.', 'the sandwiches lose to the bananas ||| so the [bananas] should be made more next time.', 'the grapes are not as popular as the apples ||| so the [apples] should be made more next time.', 'the sandwiches are not as popular as the apples ||| so the [apples] should be made more next time.', 'the grapes are not as popular as the bananas ||| so the [bananas] should be made more next time.', 'the sandwiches are not as popular as the bananas ||| so the [bananas] should be made more next time.'], ['the grapes 
lose to the apples ||| so the [grapes] should be made less next time.', 'the sandwiches lose to the apples ||| so the [sandwiches] should be made less next time.', 'the grapes lose to the bananas ||| so the [grapes] should be made less next time.', 'the sandwiches lose to the bananas ||| so the [sandwiches] should be made less next time.', 'the grapes are not as popular as the apples ||| so the [grapes] should be made less next time.', 'the sandwiches are not as popular as the apples ||| so the [sandwiches] should be made less next time.', 'the grapes are not as popular as the bananas ||| so the [grapes] should be made less next time.', 'the sandwiches are not as popular as the bananas ||| so the [sandwiches] should be made less next time.'], ["the grapes lose to the apples ||| so the [apples] shouldn't be made less next time.", "the sandwiches lose to the apples ||| so the [apples] shouldn't be made less next time.", "the grapes lose to the bananas ||| so the [bananas] shouldn't be made less next time.", "the sandwiches lose to the bananas ||| so the [bananas] shouldn't be made less next time.", "the grapes are not as popular as the apples ||| so the [apples] shouldn't be made less next time.", "the sandwiches are not as popular as the apples ||| so the [apples] shouldn't be made less next time.", "the grapes are not as popular as the bananas ||| so the [bananas] shouldn't be made less next time.", "the sandwiches are not as popular as the bananas ||| so the [bananas] shouldn't be made less next time."], ["the grapes lose to the apples ||| so the [grapes] shouldn't be made more next time.", "the sandwiches lose to the apples ||| so the [sandwiches] shouldn't be made more next time.", "the grapes lose to the bananas ||| so the [grapes] shouldn't be made more next time.", "the sandwiches lose to the bananas ||| so the [sandwiches] shouldn't be made more next time.", "the grapes are not as popular as the apples ||| so the [grapes] shouldn't be made more next time.", "the sandwiches are not as popular as the apples ||| so the [sandwiches] shouldn't be made more next time.", "the grapes are not as popular as the bananas ||| so the [grapes] shouldn't be made more next time.", "the sandwiches are not as popular as the bananas ||| so the [sandwiches] shouldn't be made more next time."], ["the apples are less popular than the grapes ||| so the [apples] shouldn't be made more next time.", "the apples are less popular than the sandwiches ||| so the [apples] shouldn't be made more next time.", "the bananas are less popular than the grapes ||| so the [bananas] shouldn't be made more next time.", "the bananas are less popular than the sandwiches ||| so the [bananas] shouldn't be made more next time.", "the apples are sold less than the grapes ||| so the [apples] shouldn't be made more next time.", "the apples are sold less than the sandwiches ||| so the [apples] shouldn't be made more next time.", "the bananas are sold less than the grapes ||| so the [bananas] shouldn't be made more next time.", "the bananas are sold less than the sandwiches ||| so the [bananas] shouldn't be made more next time."], ["the apples are less popular than the grapes ||| so the [grapes] shouldn't be made less next time.", "the apples are less popular than the sandwiches ||| so the [sandwiches] shouldn't be made less next time.", "the bananas are less popular than the grapes ||| so the [grapes] shouldn't be made less next time.", "the bananas are less popular than the sandwiches ||| so the [sandwiches] shouldn't be made less 
next time.", "the apples are sold less than the grapes ||| so the [grapes] shouldn't be made less next time.", "the apples are sold less than the sandwiches ||| so the [sandwiches] shouldn't be made less next time.", "the bananas are sold less than the grapes ||| so the [grapes] shouldn't be made less next time.", "the bananas are sold less than the sandwiches ||| so the [sandwiches] shouldn't be made less next time."], ['the apples are less popular than the grapes ||| so the [apples] should be made less next time.', 'the apples are less popular than the sandwiches ||| so the [apples] should be made less next time.', 'the bananas are less popular than the grapes ||| so the [bananas] should be made less next time.', 'the bananas are less popular than the sandwiches ||| so the [bananas] should be made less next time.', 'the apples are sold less than the grapes ||| so the [apples] should be made less next time.', 'the apples are sold less than the sandwiches ||| so the [apples] should be made less next time.', 'the bananas are sold less than the grapes ||| so the [bananas] should be made less next time.', 'the bananas are sold less than the sandwiches ||| so the [bananas] should be made less next time.'], ['the apples are less popular than the grapes ||| so the [grapes] should be made more next time.', 'the apples are less popular than the sandwiches ||| so the [sandwiches] should be made more next time.', 'the bananas are less popular than the grapes ||| so the [grapes] should be made more next time.', 'the bananas are less popular than the sandwiches ||| so the [sandwiches] should be made more next time.', 'the apples are sold less than the grapes ||| so the [grapes] should be made more next time.', 'the apples are sold less than the sandwiches ||| so the [sandwiches] should be made more next time.', 'the bananas are sold less than the grapes ||| so the [grapes] should be made more next time.', 'the bananas are sold less than the sandwiches ||| so the [sandwiches] should be made more next time.'], ["the grapes don't lose to the apples ||| so the [apples] shouldn't be made more next time.", "the sandwiches don't lose to the apples ||| so the [apples] shouldn't be made more next time.", "the grapes don't lose to the bananas ||| so the [bananas] shouldn't be made more next time.", "the sandwiches don't lose to the bananas ||| so the [bananas] shouldn't be made more next time.", "the grapes are as popular as the apples ||| so the [apples] shouldn't be made more next time.", "the sandwiches are as popular as the apples ||| so the [apples] shouldn't be made more next time.", "the grapes are as popular as the bananas ||| so the [bananas] shouldn't be made more next time.", "the sandwiches are as popular as the bananas ||| so the [bananas] shouldn't be made more next time."], ["the grapes don't lose to the apples ||| so the [grapes] shouldn't be made less next time.", "the sandwiches don't lose to the apples ||| so the [sandwiches] shouldn't be made less next time.", "the grapes don't lose to the bananas ||| so the [grapes] shouldn't be made less next time.", "the sandwiches don't lose to the bananas ||| so the [sandwiches] shouldn't be made less next time.", "the grapes are as popular as the apples ||| so the [grapes] shouldn't be made less next time.", "the sandwiches are as popular as the apples ||| so the [sandwiches] shouldn't be made less next time.", "the grapes are as popular as the bananas ||| so the [grapes] shouldn't be made less next time.", "the sandwiches are as popular as the bananas 
||| so the [sandwiches] shouldn't be made less next time."], ["the grapes don't lose to the apples ||| so the [apples] should be made less next time.", "the sandwiches don't lose to the apples ||| so the [apples] should be made less next time.", "the grapes don't lose to the bananas ||| so the [bananas] should be made less next time.", "the sandwiches don't lose to the bananas ||| so the [bananas] should be made less next time.", 'the grapes are as popular as the apples ||| so the [apples] should be made less next time.', 'the sandwiches are as popular as the apples ||| so the [apples] should be made less next time.', 'the grapes are as popular as the bananas ||| so the [bananas] should be made less next time.', 'the sandwiches are as popular as the bananas ||| so the [bananas] should be made less next time.'], ["the grapes don't lose to the apples ||| so the [grapes] should be made more next time.", "the sandwiches don't lose to the apples ||| so the [sandwiches] should be made more next time.", "the grapes don't lose to the bananas ||| so the [grapes] should be made more next time.", "the sandwiches don't lose to the bananas ||| so the [sandwiches] should be made more next time.", 'the grapes are as popular as the apples ||| so the [grapes] should be made more next time.', 'the sandwiches are as popular as the apples ||| so the [sandwiches] should be made more next time.', 'the grapes are as popular as the bananas ||| so the [grapes] should be made more next time.', 'the sandwiches are as popular as the bananas ||| so the [sandwiches] should be made more next time.'], ["the apples are more popular than the grapes ||| but the [apples] shouldn't be made more next time.", "the apples are more popular than the sandwiches ||| but the [apples] shouldn't be made more next time.", "the bananas are more popular than the grapes ||| but the [bananas] shouldn't be made more next time.", "the bananas are more popular than the sandwiches ||| but the [bananas] shouldn't be made more next time.", "the apples are sold more than the grapes ||| but the [apples] shouldn't be made more next time.", "the apples are sold more than the sandwiches ||| but the [apples] shouldn't be made more next time.", "the bananas are sold more than the grapes ||| but the [bananas] shouldn't be made more next time.", "the bananas are sold more than the sandwiches ||| but the [bananas] shouldn't be made more next time."], ["the apples are more popular than the grapes ||| but the [grapes] shouldn't be made less next time.", "the apples are more popular than the sandwiches ||| but the [sandwiches] shouldn't be made less next time.", "the bananas are more popular than the grapes ||| but the [grapes] shouldn't be made less next time.", "the bananas are more popular than the sandwiches ||| but the [sandwiches] shouldn't be made less next time.", "the apples are sold more than the grapes ||| but the [grapes] shouldn't be made less next time.", "the apples are sold more than the sandwiches ||| but the [sandwiches] shouldn't be made less next time.", "the bananas are sold more than the grapes ||| but the [grapes] shouldn't be made less next time.", "the bananas are sold more than the sandwiches ||| but the [sandwiches] shouldn't be made less next time."], ['the apples are more popular than the grapes ||| but the [apples] should be made less next time.', 'the apples are more popular than the sandwiches ||| but the [apples] should be made less next time.', 'the bananas are more popular than the grapes ||| but the [bananas] should be made 
less next time.', 'the bananas are more popular than the sandwiches ||| but the [bananas] should be made less next time.', 'the apples are sold more than the grapes ||| but the [apples] should be made less next time.', 'the apples are sold more than the sandwiches ||| but the [apples] should be made less next time.', 'the bananas are sold more than the grapes ||| but the [bananas] should be made less next time.', 'the bananas are sold more than the sandwiches ||| but the [bananas] should be made less next time.'], ['the apples are more popular than the grapes ||| but the [grapes] should be made more next time.', 'the apples are more popular than the sandwiches ||| but the [sandwiches] should be made more next time.', 'the bananas are more popular than the grapes ||| but the [grapes] should be made more next time.', 'the bananas are more popular than the sandwiches ||| but the [sandwiches] should be made more next time.', 'the apples are sold more than the grapes ||| but the [grapes] should be made more next time.', 'the apples are sold more than the sandwiches ||| but the [sandwiches] should be made more next time.', 'the bananas are sold more than the grapes ||| but the [grapes] should be made more next time.', 'the bananas are sold more than the sandwiches ||| but the [sandwiches] should be made more next time.'], ["the grapes lose to the apples ||| but the [apples] shouldn't be made more next time.", "the sandwiches lose to the apples ||| but the [apples] shouldn't be made more next time.", "the grapes lose to the bananas ||| but the [bananas] shouldn't be made more next time.", "the sandwiches lose to the bananas ||| but the [bananas] shouldn't be made more next time.", "the grapes are not as popular as the apples ||| but the [apples] shouldn't be made more next time.", "the sandwiches are not as popular as the apples ||| but the [apples] shouldn't be made more next time.", "the grapes are not as popular as the bananas ||| but the [bananas] shouldn't be made more next time.", "the sandwiches are not as popular as the bananas ||| but the [bananas] shouldn't be made more next time."], ["the grapes lose to the apples ||| but the [grapes] shouldn't be made less next time.", "the sandwiches lose to the apples ||| but the [sandwiches] shouldn't be made less next time.", "the grapes lose to the bananas ||| but the [grapes] shouldn't be made less next time.", "the sandwiches lose to the bananas ||| but the [sandwiches] shouldn't be made less next time.", "the grapes are not as popular as the apples ||| but the [grapes] shouldn't be made less next time.", "the sandwiches are not as popular as the apples ||| but the [sandwiches] shouldn't be made less next time.", "the grapes are not as popular as the bananas ||| but the [grapes] shouldn't be made less next time.", "the sandwiches are not as popular as the bananas ||| but the [sandwiches] shouldn't be made less next time."], ['the grapes lose to the apples ||| but the [apples] should be made less next time.', 'the sandwiches lose to the apples ||| but the [apples] should be made less next time.', 'the grapes lose to the bananas ||| but the [bananas] should be made less next time.', 'the sandwiches lose to the bananas ||| but the [bananas] should be made less next time.', 'the grapes are not as popular as the apples ||| but the [apples] should be made less next time.', 'the sandwiches are not as popular as the apples ||| but the [apples] should be made less next time.', 'the grapes are not as popular as the bananas ||| but the [bananas] should be 
made less next time.', 'the sandwiches are not as popular as the bananas ||| but the [bananas] should be made less next time.'], ['the grapes lose to the apples ||| but the [grapes] should be made more next time.', 'the sandwiches lose to the apples ||| but the [sandwiches] should be made more next time.', 'the grapes lose to the bananas ||| but the [grapes] should be made more next time.', 'the sandwiches lose to the bananas ||| but the [sandwiches] should be made more next time.', 'the grapes are not as popular as the apples ||| but the [grapes] should be made more next time.', 'the sandwiches are not as popular as the apples ||| but the [sandwiches] should be made more next time.', 'the grapes are not as popular as the bananas ||| but the [grapes] should be made more next time.', 'the sandwiches are not as popular as the bananas ||| but the [sandwiches] should be made more next time.'], ['the apples are less popular than the grapes ||| but the [apples] should be made more next time.', 'the apples are less popular than the sandwiches ||| but the [apples] should be made more next time.', 'the bananas are less popular than the grapes ||| but the [bananas] should be made more next time.', 'the bananas are less popular than the sandwiches ||| but the [bananas] should be made more next time.', 'the apples are sold less than the grapes ||| but the [apples] should be made more next time.', 'the apples are sold less than the sandwiches ||| but the [apples] should be made more next time.', 'the bananas are sold less than the grapes ||| but the [bananas] should be made more next time.', 'the bananas are sold less than the sandwiches ||| but the [bananas] should be made more next time.'], ['the apples are less popular than the grapes ||| but the [grapes] should be made less next time.', 'the apples are less popular than the sandwiches ||| but the [sandwiches] should be made less next time.', 'the bananas are less popular than the grapes ||| but the [grapes] should be made less next time.', 'the bananas are less popular than the sandwiches ||| but the [sandwiches] should be made less next time.', 'the apples are sold less than the grapes ||| but the [grapes] should be made less next time.', 'the apples are sold less than the sandwiches ||| but the [sandwiches] should be made less next time.', 'the bananas are sold less than the grapes ||| but the [grapes] should be made less next time.', 'the bananas are sold less than the sandwiches ||| but the [sandwiches] should be made less next time.'], ["the apples are less popular than the grapes ||| but the [apples] shouldn't be made less next time.", "the apples are less popular than the sandwiches ||| but the [apples] shouldn't be made less next time.", "the bananas are less popular than the grapes ||| but the [bananas] shouldn't be made less next time.", "the bananas are less popular than the sandwiches ||| but the [bananas] shouldn't be made less next time.", "the apples are sold less than the grapes ||| but the [apples] shouldn't be made less next time.", "the apples are sold less than the sandwiches ||| but the [apples] shouldn't be made less next time.", "the bananas are sold less than the grapes ||| but the [bananas] shouldn't be made less next time.", "the bananas are sold less than the sandwiches ||| but the [bananas] shouldn't be made less next time."], ["the apples are less popular than the grapes ||| but the [grapes] shouldn't be made more next time.", "the apples are less popular than the sandwiches ||| but the [sandwiches] shouldn't be made 
more next time.", "the bananas are less popular than the grapes ||| but the [grapes] shouldn't be made more next time.", "the bananas are less popular than the sandwiches ||| but the [sandwiches] shouldn't be made more next time.", "the apples are sold less than the grapes ||| but the [grapes] shouldn't be made more next time.", "the apples are sold less than the sandwiches ||| but the [sandwiches] shouldn't be made more next time.", "the bananas are sold less than the grapes ||| but the [grapes] shouldn't be made more next time.", "the bananas are sold less than the sandwiches ||| but the [sandwiches] shouldn't be made more next time."], ["the grapes don't lose to the apples ||| but the [apples] should be made more next time.", "the sandwiches don't lose to the apples ||| but the [apples] should be made more next time.", "the grapes don't lose to the bananas ||| but the [bananas] should be made more next time.", "the sandwiches don't lose to the bananas ||| but the [bananas] should be made more next time.", 'the grapes are as popular as the apples ||| but the [apples] should be made more next time.', 'the sandwiches are as popular as the apples ||| but the [apples] should be made more next time.', 'the grapes are as popular as the bananas ||| but the [bananas] should be made more next time.', 'the sandwiches are as popular as the bananas ||| but the [bananas] should be made more next time.'], ["the grapes don't lose to the apples ||| but the [grapes] should be made less next time.", "the sandwiches don't lose to the apples ||| but the [sandwiches] should be made less next time.", "the grapes don't lose to the bananas ||| but the [grapes] should be made less next time.", "the sandwiches don't lose to the bananas ||| but the [sandwiches] should be made less next time.", 'the grapes are as popular as the apples ||| but the [grapes] should be made less next time.', 'the sandwiches are as popular as the apples ||| but the [sandwiches] should be made less next time.', 'the grapes are as popular as the bananas ||| but the [grapes] should be made less next time.', 'the sandwiches are as popular as the bananas ||| but the [sandwiches] should be made less next time.'], ["the grapes don't lose to the apples ||| but the [apples] shouldn't be made less next time.", "the sandwiches don't lose to the apples ||| but the [apples] shouldn't be made less next time.", "the grapes don't lose to the bananas ||| but the [bananas] shouldn't be made less next time.", "the sandwiches don't lose to the bananas ||| but the [bananas] shouldn't be made less next time.", "the grapes are as popular as the apples ||| but the [apples] shouldn't be made less next time.", "the sandwiches are as popular as the apples ||| but the [apples] shouldn't be made less next time.", "the grapes are as popular as the bananas ||| but the [bananas] shouldn't be made less next time.", "the sandwiches are as popular as the bananas ||| but the [bananas] shouldn't be made less next time."], ["the grapes don't lose to the apples ||| but the [grapes] shouldn't be made more next time.", "the sandwiches don't lose to the apples ||| but the [sandwiches] shouldn't be made more next time.", "the grapes don't lose to the bananas ||| but the [grapes] shouldn't be made more next time.", "the sandwiches don't lose to the bananas ||| but the [sandwiches] shouldn't be made more next time.", "the grapes are as popular as the apples ||| but the [grapes] shouldn't be made more next time.", "the sandwiches are as popular as the apples ||| but the 
[sandwiches] shouldn't be made more next time.", "the grapes are as popular as the bananas ||| but the [grapes] shouldn't be made more next time.", "the sandwiches are as popular as the bananas ||| but the [sandwiches] shouldn't be made more next time."]], [("George gave the tickets of the play to Linda ||| because [George] wasn't interested in it.", "George sent the tickets of the play to Linda ||| because [George] wasn't interested in it."), ('George gave the tickets of the play to Linda ||| because [Linda] was eager to see it.', 'George sent the tickets of the play to Linda ||| because [Linda] was eager to see it.'), ("George gave the tickets of the play to Linda ||| because [George] wasn't eager to see it.", "George sent the tickets of the play to Linda ||| because [George] wasn't eager to see it."), ('George gave the tickets of the play to Linda ||| because [Linda] was interested in it.', 'George sent the tickets of the play to Linda ||| because [Linda] was interested in it.'), ("Linda received the tickets of the play from George ||| because [George] wasn't interested in it.", "Linda took the tickets of the play from George ||| because [George] wasn't interested in it."), ('Linda received the tickets of the play from George ||| because [Linda] was eager to see it.', 'Linda took the tickets of the play from George ||| because [Linda] was eager to see it.'), ("Linda received the tickets of the play from George ||| because [George] wasn't eager to see it.", "Linda took the tickets of the play from George ||| because [George] wasn't eager to see it."), ('Linda received the tickets of the play from George ||| because [Linda] was interested in it.', 'Linda took the tickets of the play from George ||| because [Linda] was interested in it.'), ("George didn't give the tickets of the play to Linda ||| because [George] was interested in it.", "George didn't send the tickets of the play to Linda ||| because [George] was interested in it."), ("George didn't give the tickets of the play to Linda ||| because [Linda] wasn't eager to see it.", "George didn't send the tickets of the play to Linda ||| because [Linda] wasn't eager to see it."), ("George didn't give the tickets of the play to Linda ||| because [George] was eager to see it.", "George didn't send the tickets of the play to Linda ||| because [George] was eager to see it."), ("George didn't give the tickets of the play to Linda ||| because [Linda] wasn't interested in it.", "George didn't send the tickets of the play to Linda ||| because [Linda] wasn't interested in it."), ("Linda didn't receive the tickets of the play from George ||| because [George] was interested in it.", "Linda didn't take the tickets of the play from George ||| because [George] was interested in it."), ("Linda didn't receive the tickets of the play from George ||| because [Linda] wasn't eager to see it.", "Linda didn't take the tickets of the play from George ||| because [Linda] wasn't eager to see it."), ("Linda didn't receive the tickets of the play from George ||| because [George] was eager to see it.", "Linda didn't take the tickets of the play from George ||| because [George] was eager to see it."), ("Linda didn't receive the tickets of the play from George ||| because [Linda] wasn't interested in it.", "Linda didn't take the tickets of the play from George ||| because [Linda] wasn't interested in it."), ('George gave the tickets of the play to Linda ||| although [George] was interested in it.', 'George sent the tickets of the play to Linda ||| although [George] was 
interested in it.'), ("George gave the tickets of the play to Linda ||| although [Linda] wasn't eager to see it.", "George sent the tickets of the play to Linda ||| although [Linda] wasn't eager to see it."), ('George gave the tickets of the play to Linda ||| although [George] was eager to see it.', 'George sent the tickets of the play to Linda ||| although [George] was eager to see it.'), ("George gave the tickets of the play to Linda ||| although [Linda] wasn't interested in it.", "George sent the tickets of the play to Linda ||| although [Linda] wasn't interested in it."), ('Linda received the tickets of the play from George ||| although [George] was interested in it.', 'Linda took the tickets of the play from George ||| although [George] was interested in it.'), ("Linda received the tickets of the play from George ||| although [Linda] wasn't eager to see it.", "Linda took the tickets of the play from George ||| although [Linda] wasn't eager to see it."), ('Linda received the tickets of the play from George ||| although [George] was eager to see it.', 'Linda took the tickets of the play from George ||| although [George] was eager to see it.'), ("Linda received the tickets of the play from George ||| although [Linda] wasn't interested in it.", "Linda took the tickets of the play from George ||| although [Linda] wasn't interested in it."), ("George didn't give the tickets of the play to Linda ||| although [George] wasn't interested in it.", "George didn't send the tickets of the play to Linda ||| although [George] wasn't interested in it."), ("George didn't give the tickets of the play to Linda ||| although [Linda] was eager to see it.", "George didn't send the tickets of the play to Linda ||| although [Linda] was eager to see it."), ("George didn't give the tickets of the play to Linda ||| although [George] wasn't eager to see it.", "George didn't send the tickets of the play to Linda ||| although [George] wasn't eager to see it."), ("George didn't give the tickets of the play to Linda ||| although [Linda] was interested in it.", "George didn't send the tickets of the play to Linda ||| although [Linda] was interested in it."), ("Linda didn't receive the tickets of the play from George ||| although [George] wasn't interested in it.", "Linda didn't take the tickets of the play from George ||| although [George] wasn't interested in it."), ("Linda didn't receive the tickets of the play from George ||| although [Linda] was eager to see it.", "Linda didn't take the tickets of the play from George ||| although [Linda] was eager to see it."), ("Linda didn't receive the tickets of the play from George ||| although [George] wasn't eager to see it.", "Linda didn't take the tickets of the play from George ||| although [George] wasn't eager to see it."), ("Linda didn't receive the tickets of the play from George ||| although [Linda] was interested in it.", "Linda didn't take the tickets of the play from George ||| although [Linda] was interested in it.")], [('Peter envied Mandy ||| because [Peter] failed.', 'Peter was jealous of Mandy ||| because [Peter] failed.'), ('Peter envied Mandy ||| because [Mandy] was successful.', 'Peter was jealous of Mandy ||| because [Mandy] was successful.'), ("Peter envied Mandy ||| because [Peter] wasn't successful.", "Peter was jealous of Mandy ||| because [Peter] wasn't successful."), ("Peter envied Mandy ||| because [Mandy] didn't fail.", "Peter was jealous of Mandy ||| because [Mandy] didn't fail."), ('Mandy was envied by Peter ||| because [Peter] failed.', 'Mandy 
was admired by Peter ||| because [Peter] failed.'), ('Mandy was envied by Peter ||| because [Mandy] was successful.', 'Mandy was admired by Peter ||| because [Mandy] was successful.'), ("Mandy was envied by Peter ||| because [Peter] wasn't successful.", "Mandy was admired by Peter ||| because [Peter] wasn't successful."), ("Mandy was envied by Peter ||| because [Mandy] didn't fail.", "Mandy was admired by Peter ||| because [Mandy] didn't fail."), ("Peter didn't envy Mandy ||| because [Peter] didn't fail.", "Peter wasn't jealous of Mandy ||| because [Peter] didn't fail."), ("Peter didn't envy Mandy ||| because [Mandy] wasn't successful.", "Peter wasn't jealous of Mandy ||| because [Mandy] wasn't successful."), ("Peter didn't envy Mandy ||| because [Peter] was successful.", "Peter wasn't jealous of Mandy ||| because [Peter] was successful."), ("Peter didn't envy Mandy ||| because [Mandy] failed.", "Peter wasn't jealous of Mandy ||| because [Mandy] failed."), ("Mandy wasn't envied by Peter ||| because [Peter] didn't fail.", "Mandy wasn't admired by Peter ||| because [Peter] didn't fail."), ("Mandy wasn't envied by Peter ||| because [Mandy] wasn't successful.", "Mandy wasn't admired by Peter ||| because [Mandy] wasn't successful."), ("Mandy wasn't envied by Peter ||| because [Peter] was successful.", "Mandy wasn't admired by Peter ||| because [Peter] was successful."), ("Mandy wasn't envied by Peter ||| because [Mandy] failed.", "Mandy wasn't admired by Peter ||| because [Mandy] failed."), ("Peter envied Mandy ||| although [Peter] didn't fail.", "Peter was jealous of Mandy ||| although [Peter] didn't fail."), ("Peter envied Mandy ||| although [Mandy] wasn't successful.", "Peter was jealous of Mandy ||| although [Mandy] wasn't successful."), ('Peter envied Mandy ||| although [Peter] was successful.', 'Peter was jealous of Mandy ||| although [Peter] was successful.'), ('Peter envied Mandy ||| although [Mandy] failed.', 'Peter was jealous of Mandy ||| although [Mandy] failed.'), ("Mandy was envied by Peter ||| although [Peter] didn't fail.", "Mandy was admired by Peter ||| although [Peter] didn't fail."), ("Mandy was envied by Peter ||| although [Mandy] wasn't successful.", "Mandy was admired by Peter ||| although [Mandy] wasn't successful."), ('Mandy was envied by Peter ||| although [Peter] was successful.', 'Mandy was admired by Peter ||| although [Peter] was successful.'), ('Mandy was envied by Peter ||| although [Mandy] failed.', 'Mandy was admired by Peter ||| although [Mandy] failed.'), ("Peter didn't envy Mandy ||| although [Peter] failed.", "Peter wasn't jealous of Mandy ||| although [Peter] failed."), ("Peter didn't envy Mandy ||| although [Mandy] was successful.", "Peter wasn't jealous of Mandy ||| although [Mandy] was successful."), ("Peter didn't envy Mandy ||| although [Peter] wasn't successful.", "Peter wasn't jealous of Mandy ||| although [Peter] wasn't successful."), ("Peter didn't envy Mandy ||| although [Mandy] didn't fail.", "Peter wasn't jealous of Mandy ||| although [Mandy] didn't fail."), ("Mandy wasn't envied by Peter ||| although [Peter] failed.", "Mandy wasn't admired by Peter ||| although [Peter] failed."), ("Mandy wasn't envied by Peter ||| although [Mandy] was successful.", "Mandy wasn't admired by Peter ||| although [Mandy] was successful."), ("Mandy wasn't envied by Peter ||| although [Peter] wasn't successful.", "Mandy wasn't admired by Peter ||| although [Peter] wasn't successful."), ("Mandy wasn't envied by Peter ||| although [Mandy] didn't fail.", "Mandy wasn't 
admired by Peter ||| although [Mandy] didn't fail.")], ['James ceded the presidency to Amy ||| although [James] was not notorious.', "James ceded the presidency to Amy ||| although [Amy] wasn't popular.", 'James ceded the presidency to Amy ||| although [James] was popular.', 'James ceded the presidency to Amy ||| although [Amy] was notorious.', 'Amy took over the presidency from James ||| although [James] was not notorious.', "Amy took over the presidency from James ||| although [Amy] wasn't popular.", 'Amy took over the presidency from James ||| although [James] was popular.', 'Amy took over the presidency from James ||| although [Amy] was notorious.', "James didn't cede the presidency to Amy ||| although [James] was notorious.", "James didn't cede the presidency to Amy ||| although [Amy] was popular.", "James didn't cede the presidency to Amy ||| although [James] wasn't popular.", "James didn't cede the presidency to Amy ||| although [Amy] was not notorious.", "Amy didn't take over the presidency from James ||| although [James] was notorious.", "Amy didn't take over the presidency from James ||| although [Amy] was popular.", "Amy didn't take over the presidency from James ||| although [James] wasn't popular.", "Amy didn't take over the presidency from James ||| although [Amy] was not notorious."]]Traceback (most recent call last): + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details + __import__(pkg_name) + File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 91, in + child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 68, in __init__ + t1, t2, is_next_label = self.split_sent(line) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 128, in split_sent + t1, t2 = line.strip(), None +AttributeError: 'list' object has no attribute 'strip' + +06/09/2019 22:23:17 - ERROR - pytorch_pretrained_bert.tokenization - Model name '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt' was not found in model name list (bert-base-uncased, bert-large-uncased, bert-base-cased, bert-large-cased, bert-base-multilingual-uncased, bert-base-multilingual-cased, bert-base-chinese). We assumed '/nas/pretrain-bert/pretrain-tensorflow/uncased_L-12_H-768_A-12/vocab.txt' was a path or url but couldn't find any file associated to this path or url. +06/09/2019 22:23:17 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +Traceback (most recent call last): + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details + __import__(pkg_name) + File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 88, in + sentences += make_sentences(**frame)[-1] + File "/home/xd/projects/pytorch-pretrained-BERT/child_generator.py", line 53, in make_sentences + assert entities[0].lower() in tokenizer.vocab , entities[0] +AttributeError: 'NoneType' object has no attribute 'vocab' +Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten. 
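Editor's note: the first traceback above fails because CHILDDataset.split_sent calls line.strip() on entries that are still nested lists of templates (the structure dumped above), not plain strings. A minimal sketch of a flattening step, assuming the generator only nests lists/tuples of "premise ||| continuation" strings; the helper name flatten_sentences is hypothetical and not part of the repo:

def flatten_sentences(nested):
    """Yield plain sentence strings from arbitrarily nested lists/tuples."""
    for item in nested:
        if isinstance(item, (list, tuple)):
            # Recurse into nested template groups like the ones printed above.
            yield from flatten_sentences(item)
        elif isinstance(item, str):
            yield item
        else:
            raise TypeError('unexpected item of type %s' % type(item).__name__)

# e.g. in train_child.py: sentences += list(flatten_sentences(make_sentences(**frame)[-1]))

Whether to keep both members of the paraphrase tuples or only one of them is a design choice the dump above leaves open.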
+Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.
+Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.
+Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable.
+Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.
+Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1)
+06/09/2019 22:24:05 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt
+06/09/2019 22:24:05 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt
+Traceback (most recent call last):
+  File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main
+    mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
+  File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details
+    __import__(pkg_name)
+  File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 88, in <module>
+    sentences += make_sentences(**frame)[-1]
+  File "/home/xd/projects/pytorch-pretrained-BERT/child_generator.py", line 80, in make_sentences
+    B_template = B_template[int(prepositive_pred)]
+UnboundLocalError: local variable 'B_template' referenced before assignment
+Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.
+Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.
+Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.
+Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable.
+Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.
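Editor's note: the tokenization ERROR further up shows the TensorFlow-checkpoint vocab path never resolves, which is why one run later sees tokenizer as None ("'NoneType' object has no attribute 'vocab'"). A minimal sketch, assuming the local PyTorch vocab file from the INFO lines is the intended one; the added assert is an extra guard, not the repo's code:

from pytorch_pretrained_bert.tokenization import BertTokenizer

# Path copied from the INFO lines above; adjust to the environment.
vocab_file = '/nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt'
tokenizer = BertTokenizer.from_pretrained(vocab_file, do_lower_case=True)
# from_pretrained logs an ERROR and returns None when the path cannot be
# resolved, so fail early instead of crashing later inside child_generator.py.
assert tokenizer is not None, 'could not load vocabulary from %s' % vocab_file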
+Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1)
+06/09/2019 22:28:12 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt
+06/09/2019 22:28:12 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt
+06/09/2019 22:28:12 - INFO - run_child_finetuning - num_sent = 15840
+Traceback (most recent call last):
+  File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main
+    mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
+  File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details
+    __import__(pkg_name)
+  File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 91, in <module>
+    child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent)
+  File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 88, in __init__
+    self.features = [convert_example_to_features(example, self.seq_len, self.tokenizer) for example in self.examples]
+  File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 88, in <listcomp>
+    self.features = [convert_example_to_features(example, self.seq_len, self.tokenizer) for example in self.examples]
+  File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 204, in convert_example_to_features
+    assert len(input_ids) == max_seq_length
+AssertionError
+Warning: apex was installed without --cpp_ext. Falling back to Python flatten and unflatten.
+Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead.
+Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable.
+Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable.
+Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex.
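Editor's note: the AssertionError above, and the "29 != 28" variant in the run that follows, means convert_example_to_features produced one input id more than max_seq_length. A minimal sketch of clamping the features to the fixed length before that assert, assuming the usual input_ids/input_mask/segment_ids lists; fit_to_length is a hypothetical helper, not the repo's code:

def fit_to_length(input_ids, input_mask, segment_ids, max_seq_length, pad_id=0):
    """Truncate or zero-pad BERT features to exactly max_seq_length."""
    # Truncate over-long examples; a more careful version would drop tokens
    # before the final [SEP] instead of cutting from the end.
    input_ids = input_ids[:max_seq_length]
    input_mask = input_mask[:max_seq_length]
    segment_ids = segment_ids[:max_seq_length]
    # Pad short examples up to the fixed length.
    pad = max_seq_length - len(input_ids)
    input_ids = input_ids + [pad_id] * pad
    input_mask = input_mask + [0] * pad
    segment_ids = segment_ids + [0] * pad
    assert len(input_ids) == len(input_mask) == len(segment_ids) == max_seq_length
    return input_ids, input_mask, segment_ids

Note that these runs were launched with max_seq_length=128 yet the assert fires at 28/29, so CHILDDataset apparently derives its own, tighter seq_len; the sketch only addresses the off-by-one at that derived length.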
+Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +['the students were not more in number than the sponsors ||| so the [students] were in the minority.', "Tom dosen't look after Betty ||| because [Betty] is older.", "the ball can't be put into the bag ||| although the [bag] isn't small.", "Amy didn't take over the presidency from Jack ||| although [Amy] was not notorious.", "Jack didn't cede the presidency to Donna ||| because [Donna] was notorious.", "Susan was fooled by Sam ||| but [Sam] didn't get a lot of money.", "Wendy is imitated by George in everything ||| although [Wendy] isn't good at making decisions.", "the apples are sold more than the grapes ||| so the [apples] shouldn't be made less next time.", "Donna didn't obstruct the sight of Edward ||| although [Edward] isn't tall.", "Michael didn't subsidize Mary ||| because [Michael] was poor.", "Lucy didn't perform better than Andy on the test ||| although [Andy] was lazy in doing homework.", "Mary wasn't appreciated by Michael ||| because [Michael] hadn't received a lot of help.", 'Linda was defeated by Michael in the game ||| but [Michael] was sad.', "Caroline was replaced by Amy as the actress's new name ||| although [Caroline] is easy to pronounce.", "the bag of flour hadn't been put below the bag of candy ||| so the bag of [candy] couldn't be moved first.", 'the apples are less popular than the sandwiches ||| but the [apples] should be made more next time.', "the sandwiches don't lose to the apples ||| so the [apples] should be made less next time.", 'Running at about the same speed, Sally lost to Tom in the running race ||| although [Sally] had a good start.', "the boss didn't arrive before the employee ||| because the [employee] didn't come from far away.", "Charles can't defeat Cindy at tennis ||| although [Cindy] is younger."] +06/09/2019 22:36:18 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:36:18 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:36:18 - INFO - run_child_finetuning - num_sent = 15840 +Traceback (most recent call last): + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 183, in _run_module_as_main + mod_name, mod_spec, code = _get_module_details(mod_name, _Error) + File "/home/qsj/miniconda3/lib/python3.6/runpy.py", line 109, in _get_module_details + __import__(pkg_name) + File "/home/xd/projects/pytorch-pretrained-BERT/train_child.py", line 91, in + child_dataset = CHILDDataset(tokenizer, sentences, dev_percent=args.dev_percent) + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 88, in __init__ + self.features = [convert_example_to_features(example, self.seq_len, self.tokenizer) for example in self.examples] + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 88, in + self.features = [convert_example_to_features(example, self.seq_len, self.tokenizer) for example in self.examples] + File "/home/xd/projects/pytorch-pretrained-BERT/run_child_finetuning.py", line 204, in convert_example_to_features + assert len(input_ids) == max_seq_length, '%d != %d' % (len(input_ids), max_seq_length) +AssertionError: 29 != 28 +Warning: apex was 
installed without --cpp_ext. Falling back to Python flatten and unflatten. +Warning: apex was installed without --cuda_ext. Fused syncbn kernels will be unavailable. Python fallbacks will be used instead. +Warning: apex was installed without --cuda_ext. FusedAdam will be unavailable. +Warning: apex was installed without --cuda_ext. FusedLayerNorm will be unavailable. +Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex. +Namespace(dev_percent=0.3, do_eval=True, do_lower_case=True, do_train=True, eval_batch_size=128, gradient_accumulation_steps=1, learning_rate=3e-05, max_seq_length=128, no_cuda=False, num_train_epochs=6.0, seed=42, train_batch_size=32, warmup_proportion=0.1) +["Running at about the same speed, Susan lost to David in the running race ||| because [Susan] didn't have a good start.", 'Charles cast the schoolbag up to Emma ||| before [Emma] reached the bottom of the stairs.', "the sandwiches lose to the apples ||| but the [sandwiches] shouldn't be made less next time.", 'the bucket was suffused with water from the cup ||| before the [cup] was empty.', "Jack dosen't take care of Betty ||| because [Jack] isn't older.", 'the cup was suffused with water from the bowl ||| after the [bowl] was full.', "Lucy wasn't fooled by Bush ||| so [Lucy] didn't lose a lot of money.", 'the trophy can fit into the box ||| because the [trophy] is small.', "Mary was told by Michael what time the library closes ||| although [Michael] didn't remember.", "Robert didn't cede the presidency to Donna ||| because [Robert] was popular.", 'the cup dripped water into the bowl ||| after the [cup] was full.', "Donna didn't get the presidency from Jack ||| because [Donna] wasn't popular.", "Betty didn't block the view of John ||| because [Betty] isn't tall.", "Susan didn't receive a lot of money from John ||| because [Susan] wasn't poor.", "Eric didn't send the tickets of the play to Linda ||| although [Linda] was eager to see it.", 'the bottle dripped water into the tube ||| before the [bottle] was empty.', "Running at about the same speed, Sally wasn't defeated by John in the running race ||| although [Sally] didn't have a good start.", "the bicycle was left behind the ambulance ||| because the [ambulance] wasn't going slow.", 'the pictures could be placed on all the chairs ||| although there were few of the [pictures].', "Tom always looks after Betty ||| although [Tom] isn't older."] +06/09/2019 22:42:30 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:42:30 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 22:42:30 - INFO - run_child_finetuning - num_sent = 15840 +06/09/2019 22:42:34 - INFO - run_child_finetuning - num_train_steps = 2079 +06/09/2019 22:42:37 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 22:42:37 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 22:42:37 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 22:42:40 - INFO - 
pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 22:42:45 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 22:42:45 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 22:42:45 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00 7680 +num_train_steps = 12000 +global_step 0, lr = 0.000000 +global_step 1000, lr = 0.000417 +global_step 2000, lr = 0.000417 +global_step 3000, lr = 0.000375 +global_step 4000, lr = 0.000333 +global_step 5000, lr = 0.000292 +global_step 6000, lr = 0.000250 +global_step 7000, lr = 0.000208 +global_step 8000, lr = 0.000167 +global_step 9000, lr = 0.000125 +global_step 10000, lr = 0.000083 +global_step 11000, lr = 0.000042 diff --git a/train_child_yesnoonly_maybe0_bert.out b/train_child_yesnoonly_maybe0_bert.out new file mode 100644 index 00000000000000..3e09894c9f001a --- /dev/null +++ b/train_child_yesnoonly_maybe0_bert.out @@ -0,0 +1,41 @@ +06/09/2019 16:56:11 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 16:56:11 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 16:56:16 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 16:56:16 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 16:56:16 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 16:56:19 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 16:56:24 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 16:56:24 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 16:56:24 - INFO - run_child_finetuning - Evaluating on valid set... 
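For the run above that reports `num_train_steps = 12000`, the `global_step N, lr = ...` lines trace a linear warmup followed by a linear decay, which is consistent with the `warmup_linear` schedule used by pytorch-pretrained-BERT's `BertAdam`. The peak learning rate (5e-4) and warmup proportion (0.1) are inferred from the printed values, since that run's argument dump is not visible in this excerpt; the snippet below is a reconstruction under those assumptions, not the training script itself.

```python
# Reconstruct the logged LR curve ("global_step N, lr = ...") assuming the
# warmup_linear schedule from pytorch_pretrained_bert.optimization.
# Base LR (5e-4) and warmup proportion (0.1) are inferred from the logged
# numbers; the run's own argument dump is not part of this excerpt.

def warmup_linear(x, warmup=0.1):
    # Linear ramp to the peak LR over the first `warmup` fraction of training,
    # then linear decay toward zero.
    if x < warmup:
        return x / warmup
    return 1.0 - x


def lr_at(step, base_lr=5e-4, num_train_steps=12000, warmup=0.1):
    return base_lr * warmup_linear(step / num_train_steps, warmup)


for step in range(0, 12000, 1000):
    print("global_step %d, lr = %f" % (step, lr_at(step)))
# Prints 0.000000, 0.000417, 0.000417, 0.000375, ..., 0.000083, 0.000042,
# matching the values in the log.
```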
+ Epoch: 0%| | 0/3 [00:00 11520 +num_train_steps = 720 +global_step 0, lr = 0.000000 +06/09/2019 17:05:29 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 17:05:30 - INFO - pytorch_pretrained_bert.tokenization - loading vocabulary file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased-vocab.txt +06/09/2019 17:05:38 - INFO - run_child_finetuning - device: cuda n_gpu: 1 +06/09/2019 17:05:38 - INFO - pytorch_pretrained_bert.modeling - loading archive file /nas/pretrain-bert/pretrain-pytorch/bert-base-uncased +06/09/2019 17:05:38 - INFO - pytorch_pretrained_bert.modeling - Model config { + "attention_probs_dropout_prob": 0.1, + "hidden_act": "gelu", + "hidden_dropout_prob": 0.1, + "hidden_size": 768, + "initializer_range": 0.02, + "intermediate_size": 3072, + "max_position_embeddings": 512, + "num_attention_heads": 12, + "num_hidden_layers": 12, + "type_vocab_size": 2, + "vocab_size": 30522 +} + +06/09/2019 17:05:41 - INFO - pytorch_pretrained_bert.modeling - Weights from pretrained model not used in BertForMaskedLM: ['cls.seq_relationship.weight', 'cls.seq_relationship.bias'] +06/09/2019 17:05:45 - INFO - run_child_finetuning - Epoch 0 +06/09/2019 17:05:45 - INFO - run_child_finetuning - Evaluating on train set... +06/09/2019 17:05:45 - INFO - run_child_finetuning - Evaluating on valid set... + Epoch: 0%| | 0/6 [00:00